[ 515.779242] env[61855]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61855) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 515.779579] env[61855]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61855) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 515.779701] env[61855]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61855) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 515.780056] env[61855]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 515.868880] env[61855]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61855) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 515.879224] env[61855]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61855) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 516.020799] env[61855]: INFO nova.virt.driver [None req-1bda47af-946f-49b5-8c90-bd50bcc415fb None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 516.092211] env[61855]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 516.092411] env[61855]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 516.092476] env[61855]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61855) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 519.338054] env[61855]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-70622866-d7c1-411c-959d-6512b08ffd0c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.353804] env[61855]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61855) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 519.353962] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-50b47dea-2f24-466b-ab38-0aa43a703c28 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.385611] env[61855]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 523bb.
[ 519.385735] env[61855]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.293s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 519.386346] env[61855]: INFO nova.virt.vmwareapi.driver [None req-1bda47af-946f-49b5-8c90-bd50bcc415fb None None] VMware vCenter version: 7.0.3
[ 519.390036] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94acba4-559d-400c-9ece-0427897a9523 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.407502] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91afba2b-37de-4803-8b9e-7b9ca1d44cdf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.413299] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4731fdc-a7e3-4f66-b428-f339aaddab4f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.419785] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1af5870-31cf-4393-bc1b-5f149052cdd5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.432811] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c79781-8dbe-4766-b96b-05014bc62b48 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.438631] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee4fab9-3abc-40d0-8377-5bd5cb36a6df {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.468575] env[61855]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-f5895c5f-7509-4875-8547-d87a76e528e7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.473411] env[61855]: DEBUG nova.virt.vmwareapi.driver [None req-1bda47af-946f-49b5-8c90-bd50bcc415fb None None] Extension org.openstack.compute already exists. {{(pid=61855) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 519.475984] env[61855]: INFO nova.compute.provider_config [None req-1bda47af-946f-49b5-8c90-bd50bcc415fb None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 519.494961] env[61855]: DEBUG nova.context [None req-1bda47af-946f-49b5-8c90-bd50bcc415fb None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),ed31e382-d74b-4e8c-85e0-572b48ce5144(cell1) {{(pid=61855) load_cells /opt/stack/nova/nova/context.py:464}}
[ 519.496908] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 519.497138] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 519.497845] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 519.498260] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Acquiring lock "ed31e382-d74b-4e8c-85e0-572b48ce5144" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 519.498454] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Lock "ed31e382-d74b-4e8c-85e0-572b48ce5144" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 519.499450] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Lock "ed31e382-d74b-4e8c-85e0-572b48ce5144" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 519.519380] env[61855]: INFO dbcounter [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Registered counter for database nova_cell0
[ 519.527436] env[61855]: INFO dbcounter [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Registered counter for database nova_cell1
[ 519.530497] env[61855]: DEBUG oslo_db.sqlalchemy.engines [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61855) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 519.530755] env[61855]: DEBUG oslo_db.sqlalchemy.engines [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61855) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 519.535250] env[61855]: DEBUG dbcounter [-] [61855] Writer thread running {{(pid=61855) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 519.535989] env[61855]: DEBUG dbcounter [-] [61855] Writer thread running {{(pid=61855) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 519.539135] env[61855]: ERROR nova.db.main.api [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 519.539135] env[61855]: result = function(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 519.539135] env[61855]: return func(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 519.539135] env[61855]: result = fn(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 519.539135] env[61855]: return f(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 519.539135] env[61855]: return db.service_get_minimum_version(context, binaries)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 519.539135] env[61855]: _check_db_access()
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 519.539135] env[61855]: stacktrace = ''.join(traceback.format_stack())
[ 519.539135] env[61855]:
[ 519.539135] env[61855]: ERROR nova.db.main.api [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 519.539135] env[61855]: result = function(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 519.539135] env[61855]: return func(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 519.539135] env[61855]: result = fn(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 519.539135] env[61855]: return f(*args, **kwargs)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 519.539135] env[61855]: return db.service_get_minimum_version(context, binaries)
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 519.539135] env[61855]: _check_db_access()
[ 519.539135] env[61855]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 519.539135] env[61855]: stacktrace = ''.join(traceback.format_stack())
[ 519.539135] env[61855]:
[ 519.539846] env[61855]: WARNING nova.objects.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 519.539846] env[61855]: WARNING nova.objects.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Failed to get minimum service version for cell ed31e382-d74b-4e8c-85e0-572b48ce5144
[ 519.540077] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Acquiring lock "singleton_lock" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 519.540186] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Acquired lock "singleton_lock" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 519.540436] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Releasing lock "singleton_lock" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 519.540754] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Full set of CONF: {{(pid=61855) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 519.540896] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ******************************************************************************** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 519.541035] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] Configuration options gathered from: {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 519.541202] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 519.541409] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 519.541530] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ================================================================================ {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 519.541745] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] allow_resize_to_same_host = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.541915] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] arq_binding_timeout = 300 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.542059] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] backdoor_port = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.542190] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] backdoor_socket = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.542351] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] block_device_allocate_retries = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.542513] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] block_device_allocate_retries_interval = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.542680] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cert = self.pem {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.542844] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.543016] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute_monitors = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.543189] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] config_dir = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.543356] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] config_drive_format = iso9660 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.543497] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.543663] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] config_source = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.543827] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] console_host = devstack {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.543990] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] control_exchange = nova {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.544165] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cpu_allocation_ratio = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.544322] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] daemon = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.544485] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] debug = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.544669] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] default_access_ip_network_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.544851] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] default_availability_zone = nova {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.545017] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] default_ephemeral_format = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.545179] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] default_green_pool_size = 1000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.545408] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.545571] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] default_schedule_zone = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.545726] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] disk_allocation_ratio = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.545885] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] enable_new_services = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.546072] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] enabled_apis = ['osapi_compute'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.546236] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] enabled_ssl_apis = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.546394] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] flat_injected = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.546553] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] force_config_drive = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.546708] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] force_raw_images = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.546872] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] graceful_shutdown_timeout = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.547040] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] heal_instance_info_cache_interval = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.547253] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] host = cpu-1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.547443] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.547618] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.547780] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.548010] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.548183] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instance_build_timeout = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.548342] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instance_delete_interval = 300 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.548544] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instance_format = [instance: %(uuid)s] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.548778] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instance_name_template = instance-%08x {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.548892] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instance_usage_audit = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.549070] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instance_usage_audit_period = month {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.549239] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.549404] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.549576] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] internal_service_availability_zone = internal {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.549728] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] key = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.549886] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] live_migration_retry_count = 30 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.550056] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_config_append = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.550225] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.550385] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_dir = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.550541] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.550689] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_options = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.550827] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_rotate_interval = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.550989] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_rotate_interval_type = days {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.551166] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] log_rotation_type = none {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.551294] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.551421] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.551623] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.551790] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.551918] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.552097] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] long_rpc_timeout = 1800 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.552259] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] max_concurrent_builds = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.552415] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] max_concurrent_live_migrations = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.552570] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] max_concurrent_snapshots = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.552723] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] max_local_block_devices = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.552878] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] max_logfile_count = 30 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.553042] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] max_logfile_size_mb = 200 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.553201] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] maximum_instance_delete_attempts = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.553366] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] metadata_listen = 0.0.0.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.553530] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] metadata_listen_port = 8775 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.553697] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] metadata_workers = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.553854] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] migrate_max_retries = -1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.554138] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] mkisofs_cmd = genisoimage {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.554376] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.554516] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] my_ip = 10.180.1.21 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.554680] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] network_allocate_retries = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.554857] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.555034] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.555200] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] osapi_compute_listen_port = 8774 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.555366] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] osapi_compute_unique_server_name_scope = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.555533] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] osapi_compute_workers = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.555692] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] password_length = 12 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.555852] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] periodic_enable = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.556027] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] periodic_fuzzy_delay = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.556189] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] pointer_model = usbtablet {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.556353] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] preallocate_images = none {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.556515] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] publish_errors = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.556665] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] pybasedir = /opt/stack/nova {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.556849] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ram_allocation_ratio = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.557019] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] rate_limit_burst = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.557188] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] rate_limit_except_level = CRITICAL {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.557346] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] rate_limit_interval = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.557533] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] reboot_timeout = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.557699] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] reclaim_instance_interval = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.557855] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] record = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.558031] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] reimage_timeout_per_gb = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.558200] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] report_interval = 120 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.558357] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] rescue_timeout = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.558528] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] reserved_host_cpus = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.558701] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] reserved_host_disk_mb = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.558857] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] reserved_host_memory_mb = 512 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.559023] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] reserved_huge_pages = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.559187] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] resize_confirm_window = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.559344] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] resize_fs_using_block_device = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.559503] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] resume_guests_state_on_host_boot = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.559670] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.559830] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] rpc_response_timeout = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.559987] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] run_external_periodic_tasks = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.560165] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] running_deleted_instance_action = reap {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.560328] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.560485] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] running_deleted_instance_timeout = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.560644] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler_instance_sync_interval = 120 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.560806] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_down_time = 720 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.560973] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] servicegroup_driver = db {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.561144] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] shelved_offload_time = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.561309] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] shelved_poll_interval = 3600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.561474] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] shutdown_timeout = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.561647] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] source_is_ipv6 = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.561783] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ssl_only = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.562035] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.562205] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] sync_power_state_interval = 600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.562364] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] sync_power_state_pool_size = 1000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.562532] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] syslog_log_facility = LOG_USER {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.562691] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] tempdir = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.562850] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] timeout_nbd = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.563025] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] transport_url = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.563190] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] update_resources_interval = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.563349] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] use_cow_images = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.563508] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] use_eventlog = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.563668] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] use_journal = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.563826] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] use_json = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.563983] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] use_rootwrap_daemon = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.564152] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] use_stderr = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.564308] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] use_syslog = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.564462] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vcpu_pin_set = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.564630] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plugging_is_fatal = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.564795] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plugging_timeout = 300 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.564956] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] virt_mkfs = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.565129] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] volume_usage_poll_interval = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.565286] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] watch_log_file = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.565454] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] web = /usr/share/spice-html5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 519.565634] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_concurrency.disable_process_locking = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.565931] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.566125] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.566294] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.566468] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.566647] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.566812] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.566992] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.auth_strategy = keystone {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.567172] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.compute_link_prefix = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.567344] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.567544] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.dhcp_domain = novalocal {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.567721] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.enable_instance_password = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.567889] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.glance_link_prefix = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.568066] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.568245] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.568411] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.instance_list_per_project_cells = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.568606] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.list_records_by_skipping_down_cells = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.568787] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.local_metadata_per_cell = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.568957] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.max_limit = 1000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.569141] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.metadata_cache_expiration = 15 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.569327] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.neutron_default_tenant_id = default {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.569500] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.use_neutron_default_nets = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.569673] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.569837] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.570011] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.570190] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.570359] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.vendordata_dynamic_targets = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.570528] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.vendordata_jsonfile_path = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.570713] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.570905] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.backend = dogpile.cache.memcached {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.571083] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.backend_argument = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.571254] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.config_prefix = cache.oslo {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.571422] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.dead_timeout = 60.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.571587] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.debug_cache_backend = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.571760] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.enable_retry_client = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.571905] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.enable_socket_keepalive = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.572086] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.enabled = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.572250] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.enforce_fips_mode = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.572415] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.expiration_time = 600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.572579] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.hashclient_retry_attempts = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.572745] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.572907] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_dead_retry = 300 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.573076] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_password = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.573244] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.573410] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.573575] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_pool_maxsize = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.573736] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.573899] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_sasl_enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.574088] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.574259] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.574422] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.memcache_username = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.574591] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.proxies = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.574751] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.redis_password = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.574920] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.575106] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.575278] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.redis_server = localhost:6379 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.575446] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.redis_socket_timeout = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.575605] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.redis_username = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.575766] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.retry_attempts = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.575929] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.retry_delay = 0.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.576105] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.socket_keepalive_count = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.576268] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.socket_keepalive_idle = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.576427] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.socket_keepalive_interval = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.576591] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.tls_allowed_ciphers = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.576749] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.tls_cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.576906] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.tls_certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.577077] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.tls_enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.577239] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cache.tls_keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.577434] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.577618] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.auth_type = password {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.577786] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.577963] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.578143] env[61855]: DEBUG oslo_service.service 
[None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.578311] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.578477] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.cross_az_attach = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.578672] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.debug = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.578839] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.endpoint_template = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.579016] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.http_retries = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.579187] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.579348] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.579522] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.os_region_name = RegionOne {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.579692] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.579854] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cinder.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.580034] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.580202] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.cpu_dedicated_set = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.580362] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.cpu_shared_set = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.580532] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.image_type_exclude_list = [] {{(pid=61855) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.580701] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.580865] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.581039] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.581205] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.581374] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.581539] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.resource_provider_association_refresh = 300 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.581703] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.581874] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.shutdown_retry_interval = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.582052] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.582234] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] conductor.workers = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.582410] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] console.allowed_origins = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.582575] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] console.ssl_ciphers = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.582748] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] console.ssl_minimum_version = default {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.582918] env[61855]: DEBUG oslo_service.service [None 
req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] consoleauth.enforce_session_timeout = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.583099] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] consoleauth.token_ttl = 600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.583271] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.583432] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.583598] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.583758] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.connect_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.583915] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.connect_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.584084] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.endpoint_override = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.584252] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.584413] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.584575] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.max_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.584735] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.min_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.584893] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.region_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.585062] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.retriable_status_codes = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.585224] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.service_name = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.585395] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.service_type = accelerator {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.585555] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.585714] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.status_code_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.585876] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.status_code_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.586044] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.586230] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.586391] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] cyborg.version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.586570] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.backend = sqlalchemy {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.586744] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.connection = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.586911] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.connection_debug = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.587092] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.connection_parameters = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.587259] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.connection_recycle_time = 3600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.587450] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.connection_trace = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.587621] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.db_inc_retry_interval = True {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.587790] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.db_max_retries = 20 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.588048] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.db_max_retry_interval = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.588148] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.db_retry_interval = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.588284] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.max_overflow = 50 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.588445] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.max_pool_size = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.588636] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.max_retries = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.588816] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.588977] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.mysql_wsrep_sync_wait = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.589149] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.pool_timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.589312] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.retry_interval = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.589470] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.slave_connection = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.589633] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.sqlite_synchronous = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.589794] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] database.use_db_reconnect = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.589971] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.backend = sqlalchemy {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
519.590153] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.connection = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.590318] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.connection_debug = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.590488] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.connection_parameters = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.590655] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.connection_recycle_time = 3600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.590820] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.connection_trace = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.590982] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.db_inc_retry_interval = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.591199] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.db_max_retries = 20 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.591369] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.db_max_retry_interval = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.591536] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.db_retry_interval = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.591702] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.max_overflow = 50 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.591864] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.max_pool_size = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.592037] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.max_retries = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.592782] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.592782] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.592782] 
env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.pool_timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.592782] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.retry_interval = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.592941] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.slave_connection = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.592977] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] api_database.sqlite_synchronous = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.593160] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] devices.enabled_mdev_types = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.593341] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.593521] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.593681] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ephemeral_storage_encryption.enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.593844] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.594014] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.api_servers = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.594182] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.594343] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.594517] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.594673] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.connect_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.594824] env[61855]: DEBUG 
oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.connect_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.594982] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.debug = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.595193] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.default_trusted_certificate_ids = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.595367] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.enable_certificate_validation = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.595535] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.enable_rbd_download = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.595695] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.endpoint_override = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.595863] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.596036] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.596202] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.max_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.596360] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.min_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.596524] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.num_retries = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.596695] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.rbd_ceph_conf = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.596858] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.rbd_connect_timeout = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.597036] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.rbd_pool = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.597209] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.rbd_user = {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.597370] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.region_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.597558] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.retriable_status_codes = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.597729] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.service_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.597899] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.service_type = image {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.598073] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.598236] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.status_code_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.598398] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.status_code_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.598581] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.598776] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.598943] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.verify_glance_signatures = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.599139] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] glance.version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.599322] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] guestfs.debug = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.599499] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] mks.enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.599852] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.600056] 
env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] image_cache.manager_interval = 2400 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.600234] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] image_cache.precache_concurrency = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.600408] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] image_cache.remove_unused_base_images = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.600580] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.600749] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.600927] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] image_cache.subdirectory_name = _base {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.601119] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.api_max_retries = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.601286] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.api_retry_interval = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.601447] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.601610] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.auth_type = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.601768] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.601926] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.602102] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.602270] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.conductor_group = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.602429] env[61855]: DEBUG 
oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.connect_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.602591] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.connect_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.602751] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.endpoint_override = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.602916] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.603105] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.603282] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.max_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.603446] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.min_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.603615] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.peer_list = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.603776] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.region_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.603935] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.retriable_status_codes = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.604113] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.serial_console_state_timeout = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.604278] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.service_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.604451] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.service_type = baremetal {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.604615] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.shard = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.604780] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.split_loggers = False {{(pid=61855) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.604940] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.status_code_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.605109] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.status_code_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.605270] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.605449] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.605610] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ironic.version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.605791] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.605965] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] key_manager.fixed_key = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.606162] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.606324] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.barbican_api_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.606482] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.barbican_endpoint = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.606653] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.barbican_endpoint_type = public {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.606810] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.barbican_region_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.606968] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.607165] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.certfile = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.607337] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.607525] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.607691] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.607855] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.number_of_retries = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.608028] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.retry_delay = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.608198] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.send_service_user_token = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.608358] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.608531] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.608714] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.verify_ssl = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.608874] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican.verify_ssl_path = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.609057] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.609226] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.auth_type = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.609385] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.609543] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
519.609709] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.609868] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.610035] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.610201] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.610356] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] barbican_service_user.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.610523] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.approle_role_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.610681] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.approle_secret_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.610839] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.610996] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.611209] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.611380] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.611540] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.611715] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.kv_mountpoint = secret {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.611875] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.kv_path = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.612048] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None 
None] vault.kv_version = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.612213] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.namespace = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.612370] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.root_token_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.612533] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.612692] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.ssl_ca_crt_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.612849] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.613016] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.use_ssl = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.613191] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.613360] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.613522] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.auth_type = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.613683] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.613846] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.614012] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.614177] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.connect_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.614333] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.connect_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
519.614491] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.endpoint_override = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.614655] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.614812] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.614970] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.max_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.615164] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.min_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.615333] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.region_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.615495] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.retriable_status_codes = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.615656] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.service_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.615827] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.service_type = identity {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.615992] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.616165] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.status_code_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.616326] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.status_code_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.616483] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.616661] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.616819] env[61855]: DEBUG oslo_service.service [None 
req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] keystone.version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.617027] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.connection_uri = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.617196] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.cpu_mode = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.617361] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.617560] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.cpu_models = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.617740] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.cpu_power_governor_high = performance {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.617910] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.618136] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.cpu_power_management = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.618255] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.618419] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.device_detach_attempts = 8 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.618612] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.device_detach_timeout = 20 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.618780] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.disk_cachemodes = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.618942] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.disk_prefix = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.619144] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.enabled_perf_events = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.619322] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] 
libvirt.file_backed_memory = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.619490] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.gid_maps = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.619653] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.hw_disk_discard = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.619811] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.hw_machine_type = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.619982] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.images_rbd_ceph_conf = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.620162] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.620327] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.620496] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.images_rbd_glance_store_name = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.620667] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.images_rbd_pool = rbd {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.620838] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.images_type = default {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.620999] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.images_volume_group = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.621179] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.inject_key = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.621344] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.inject_partition = -2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.621511] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.inject_password = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.621674] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.iscsi_iface = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.621835] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.iser_use_multipath = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.622008] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.622178] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.622346] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_downtime = 500 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.622508] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.622672] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.622833] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_inbound_addr = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.622998] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.623206] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.623388] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_scheme = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.623570] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_timeout_action = abort {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.623739] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_tunnelled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.623900] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.live_migration_uri = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.624075] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.624242] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.max_queues = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.624409] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.624642] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.624809] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.nfs_mount_options = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.625117] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.625300] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.625471] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.625636] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.625805] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.625970] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.num_pcie_ports = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.626151] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.626320] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.pmem_namespaces = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.626483] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.quobyte_client_cfg = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.626764] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.626938] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.627134] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.627312] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.627506] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rbd_secret_uuid = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.627677] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rbd_user = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.627842] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.628022] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.628189] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rescue_image_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.628351] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rescue_kernel_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.628529] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rescue_ramdisk_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.628713] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.628879] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.rx_queue_size = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.629059] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.smbfs_mount_options = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.629334] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.629509] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.snapshot_compression = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.629676] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.snapshot_image_format = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.629896] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.630657] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.sparse_logical_volumes = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.630657] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.swtpm_enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.630657] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.swtpm_group = tss {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.630657] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.swtpm_user = tss {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.630812] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.sysinfo_serial = unique {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.630896] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.tb_cache_size = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.631065] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.tx_queue_size = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633058] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.uid_maps = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633058] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.use_virtio_for_bridges = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633058] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.virt_type = kvm {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633058] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.volume_clear = zero 
{{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633058] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.volume_clear_size = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633058] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.volume_use_multipath = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633232] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.vzstorage_cache_path = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633232] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633232] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633232] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633232] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633232] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633388] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.vzstorage_mount_user = stack {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633611] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633700] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.633874] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.auth_type = password {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.634047] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.634214] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.certfile = None 
{{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.634380] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.634538] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.connect_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.634698] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.connect_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.634869] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.default_floating_pool = public {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.635038] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.endpoint_override = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.635207] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.extension_sync_interval = 600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.635368] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.http_retries = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.635530] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.635689] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.635850] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.max_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.636035] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.636221] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.min_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.636394] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.ovs_bridge = br-int {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.636562] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.physnets = [] {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.636733] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.region_name = RegionOne {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.636893] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.retriable_status_codes = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.637072] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.service_metadata_proxy = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.637236] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.service_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.637427] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.service_type = network {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.637594] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.637757] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.status_code_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.637915] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.status_code_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.638083] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.638266] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.638427] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] neutron.version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.638627] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] notifications.bdms_in_notifications = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.638812] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] notifications.default_level = INFO {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.638986] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] notifications.notification_format = unversioned {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.639165] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] notifications.notify_on_state_change = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.639341] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.639516] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] pci.alias = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.639686] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] pci.device_spec = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.639851] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] pci.report_in_placement = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.640032] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.640215] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.auth_type = password {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.640383] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.640544] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.640703] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.640865] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.641031] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.connect_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.641192] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.connect_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.641347] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.default_domain_id = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.641501] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.default_domain_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.641656] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.domain_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.641809] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.domain_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.641964] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.endpoint_override = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.642137] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.642293] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.642447] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.max_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.642602] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.min_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.642767] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.password = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.642925] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.project_domain_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.643101] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.project_domain_name = Default {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.643266] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.project_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.643440] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.project_name = service {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.643705] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.region_name = RegionOne {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.643780] 
env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.retriable_status_codes = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.643931] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.service_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.644109] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.service_type = placement {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.644277] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.644441] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.status_code_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.644603] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.status_code_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.644763] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.system_scope = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.644921] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.645092] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.trust_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.645253] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.user_domain_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.645424] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.user_domain_name = Default {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.645581] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.user_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.645754] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.username = placement {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.645933] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.646106] env[61855]: DEBUG oslo_service.service [None 
req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] placement.version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.646286] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.cores = 20 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.646455] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.count_usage_from_placement = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.646633] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.646811] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.injected_file_content_bytes = 10240 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.646982] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.injected_file_path_length = 255 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.647162] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.injected_files = 5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.647328] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.instances = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.647520] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.key_pairs = 100 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.647696] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.metadata_items = 128 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.647863] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.ram = 51200 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.648035] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.recheck_quota = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.648206] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.server_group_members = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.648373] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] quota.server_groups = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.648562] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.648742] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.648905] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.image_metadata_prefilter = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.649081] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.649248] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.max_attempts = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.649411] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.max_placement_results = 1000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.649577] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.649756] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.649946] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.650135] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] scheduler.workers = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.650318] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.650492] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.650672] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.650842] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.651014] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.651188] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.651352] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.651541] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.651717] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.host_subset_size = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.651925] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.652120] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.652291] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.652457] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.isolated_hosts = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.652622] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.isolated_images = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.652785] env[61855]: DEBUG oslo_service.service [None 
req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.652950] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.653181] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.653296] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.pci_in_placement = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.653459] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.653623] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.653802] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.653945] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.654120] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.654284] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.654447] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.track_instance_changes = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.654625] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.654794] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] metrics.required = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.655014] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] metrics.weight_multiplier = 1.0 
{{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.655209] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.655379] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] metrics.weight_setting = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.655694] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.655871] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] serial_console.enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.656061] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] serial_console.port_range = 10000:20000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.656238] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.656409] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.656580] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] serial_console.serialproxy_port = 6083 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.656748] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.656924] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.auth_type = password {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.657097] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.657261] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.657459] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.657620] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.insecure = False {{(pid=61855) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.657783] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.657956] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.send_service_user_token = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.658133] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.658293] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] service_user.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.658484] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.agent_enabled = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.658645] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.658961] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.659169] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.659341] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.html5proxy_port = 6082 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.659503] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.image_compression = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.659663] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.jpeg_compression = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.659851] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.playback_compression = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.660041] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.server_listen = 127.0.0.1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.660218] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.660381] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.streaming_mode = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.660540] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] spice.zlib_compression = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.660707] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] upgrade_levels.baseapi = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.660877] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] upgrade_levels.compute = auto {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.661050] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] upgrade_levels.conductor = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.661214] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] upgrade_levels.scheduler = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.661379] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.661542] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.661703] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.661887] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.662079] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.662249] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.662409] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.662571] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.662727] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vendordata_dynamic_auth.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.662898] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.api_retry_count = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.663067] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.ca_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.663294] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.663409] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.cluster_name = testcl1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.663575] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.connection_pool_size = 10 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.663732] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.console_delay_seconds = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.663899] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.datastore_regex = ^datastore.* {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.664120] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.664295] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.host_password = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.664494] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.host_port = 443 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.664639] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.host_username = administrator@vsphere.local {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.664808] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.insecure = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.664971] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.integration_bridge = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.665148] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.maximum_objects = 100 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.665308] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.pbm_default_policy = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.665471] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.pbm_enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.665635] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.pbm_wsdl_location = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.665802] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.665964] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.serial_port_proxy_uri = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.666137] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.serial_port_service_uri = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.666305] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.task_poll_interval = 0.5 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.666477] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.use_linked_clone = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.666647] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.vnc_keymap = en-us {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.666812] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.vnc_port = 5900 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.666974] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vmware.vnc_port_total = 10000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.667171] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.auth_schemes = ['none'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.667350] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.667694] env[61855]: 
DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.667887] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.668073] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.novncproxy_port = 6080 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.668258] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.server_listen = 127.0.0.1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.668433] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.668602] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.vencrypt_ca_certs = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.668763] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.vencrypt_client_cert = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.668922] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vnc.vencrypt_client_key = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.669115] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.669283] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.disable_deep_image_inspection = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.669448] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.669611] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.669797] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.669972] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.disable_rootwrap = False {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.670156] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.enable_numa_live_migration = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.670314] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.670478] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.670641] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.670801] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.libvirt_disable_apic = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.670959] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.671132] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.671294] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.671456] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.671619] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.671777] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.671938] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.672108] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
519.672269] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.672435] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.672621] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.672792] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.client_socket_timeout = 900 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.672960] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.default_pool_size = 1000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.673139] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.keep_alive = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.673308] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.max_header_line = 16384 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.673472] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.673634] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.ssl_ca_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.673795] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.ssl_cert_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.674040] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.ssl_key_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.674228] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.tcp_keepidle = 600 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.674414] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.674587] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] zvm.ca_file = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.674749] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] zvm.cloud_connector_url = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.675059] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.675237] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] zvm.reachable_timeout = 300 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.675419] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.enforce_new_defaults = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.675596] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.enforce_scope = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.675771] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.policy_default_rule = default {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.675953] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.676141] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.policy_file = policy.yaml {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.676314] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.676477] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.676639] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.676801] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.676962] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.677144] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.677320] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.677530] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.connection_string = messaging:// {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.677706] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.enabled = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.677881] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.es_doc_type = notification {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.678058] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.es_scroll_size = 10000 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.678235] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.es_scroll_time = 2m {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.678421] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.filter_error_trace = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.678589] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.hmac_keys = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.678770] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.sentinel_service_name = mymaster {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.678941] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.socket_timeout = 0.1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.679116] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.trace_requests = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.679281] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler.trace_sqlalchemy = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.679461] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler_jaeger.process_tags = {} {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.679625] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.679811] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] profiler_otlp.service_name_prefix = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.679997] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] remote_debug.host = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.680173] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] remote_debug.port = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.680353] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.680531] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.680723] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.680896] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.681072] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.681237] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.681398] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.681562] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.681722] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.681893] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.682067] env[61855]: 
DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.682241] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.682412] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.682583] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.682755] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.682924] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.683100] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.683277] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.683440] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.683607] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.683770] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.683934] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.684139] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.684275] env[61855]: DEBUG oslo_service.service [None 
req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.684436] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.684607] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.684767] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.684924] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.685099] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.685268] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.ssl = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.685444] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.685618] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.685780] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.685982] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.686180] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.686346] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.686537] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.686708] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_notifications.retry = -1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.686891] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.687077] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.687254] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.auth_section = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.687428] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.auth_type = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.687601] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.cafile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.687764] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.certfile = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.687929] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.collect_timing = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.688099] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.connect_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.688261] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.connect_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.688433] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.endpoint_id = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.688606] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.endpoint_override = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.688777] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.insecure = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.688937] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.keyfile = None {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.689108] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.max_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.689267] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.min_version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.689422] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.region_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.689582] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.retriable_status_codes = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.689755] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.service_name = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.689948] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.service_type = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.690129] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.split_loggers = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.690290] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.status_code_retries = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.690451] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.status_code_retry_delay = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.690610] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.timeout = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.690768] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.valid_interfaces = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.690926] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_limit.version = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.691104] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_reports.file_event_handler = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 519.691271] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61855) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.691429] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] oslo_reports.log_dir = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.691603] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.691764] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.691922] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.692098] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.692265] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.692424] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.692609] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.692859] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_ovs_privileged.group = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.693102] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.693289] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.693457] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.693628] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] vif_plug_ovs_privileged.user = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.693792] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.693972] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.694169] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.694332] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.694502] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.694672] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.694837] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.695010] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.695189] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.695361] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_ovs.isolate_vif = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.695533] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.695701] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.695874] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.696044] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.696211] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_vif_ovs.per_port_bridge = False {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.696379] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_brick.lock_path = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.696550] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.696712] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.696880] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] privsep_osbrick.capabilities = [21] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.697051] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] privsep_osbrick.group = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.697211] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] privsep_osbrick.helper_command = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.697377] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.697567] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.697762] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] privsep_osbrick.user = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.697911] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.698112] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] nova_sys_admin.group = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.698276] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] nova_sys_admin.helper_command = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.698444] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.698618] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.698776] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] nova_sys_admin.user = None {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 519.698908] env[61855]: DEBUG oslo_service.service [None req-b7252912-86a0-49e2-b0d9-9fc8c9f144b9 None None] ******************************************************************************** {{(pid=61855) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}}
[ 519.699343] env[61855]: INFO nova.service [-] Starting compute node (version 0.0.1)
[ 519.709560] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Getting list of instances from cluster (obj){
[ 519.709560] env[61855]:   value = "domain-c8"
[ 519.709560] env[61855]:   _type = "ClusterComputeResource"
[ 519.709560] env[61855]: } {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 519.710888] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf340cc-7034-49fe-b9a4-d3050a19b4e4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.720499] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Got total of 0 instances {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 519.721037] env[61855]: WARNING nova.virt.vmwareapi.driver [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list.
[ 519.721484] env[61855]: INFO nova.virt.node [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Generated node identity a9796bef-9c86-40e9-9cad-f1ac6217d1ad
[ 519.721710] env[61855]: INFO nova.virt.node [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Wrote node identity a9796bef-9c86-40e9-9cad-f1ac6217d1ad to /opt/stack/data/n-cpu-1/compute_id
[ 519.732990] env[61855]: WARNING nova.compute.manager [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Compute nodes ['a9796bef-9c86-40e9-9cad-f1ac6217d1ad'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
[ 519.767370] env[61855]: INFO nova.compute.manager [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
[ 519.787873] env[61855]: WARNING nova.compute.manager [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found.
[ 519.788305] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 519.788547] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 519.788698] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 519.788853] env[61855]: DEBUG nova.compute.resource_tracker [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 519.789925] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967a558d-c7e7-48df-ab0f-ad70e28a5e6f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.797675] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019409d5-817b-4cdb-8d27-c048715b5652 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.811669] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61accb40-736f-43d3-a008-d7077845318d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.817724] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2400a5-996a-4d59-9672-615674a18395 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 519.847262] env[61855]: DEBUG nova.compute.resource_tracker [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180685MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 519.847391] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 519.847579] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 519.859192] env[61855]: WARNING nova.compute.resource_tracker [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] No compute node record for cpu-1:a9796bef-9c86-40e9-9cad-f1ac6217d1ad: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host a9796bef-9c86-40e9-9cad-f1ac6217d1ad could not be found.
[ 519.871501] env[61855]: INFO nova.compute.resource_tracker [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: a9796bef-9c86-40e9-9cad-f1ac6217d1ad
[ 519.924800] env[61855]: DEBUG nova.compute.resource_tracker [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 519.924981] env[61855]: DEBUG nova.compute.resource_tracker [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=110GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 520.025238] env[61855]: INFO nova.scheduler.client.report [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] [req-b77ca9fb-2fd3-4758-93c7-2158b1839d7d] Created resource provider record via placement API for resource provider with UUID a9796bef-9c86-40e9-9cad-f1ac6217d1ad and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.
[ 520.042261] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f39e244-b20c-47b8-a33d-f7a18115fe02 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.049524] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60af27a7-47f4-4241-8620-31e06d4bf900 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.078254] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c618f0-2541-4de3-b74e-bca8224fe242 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.085105] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35402921-52e3-447b-8f56-1e5d6797aa3a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.098209] env[61855]: DEBUG nova.compute.provider_tree [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 520.139617] env[61855]: DEBUG nova.scheduler.client.report [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Updated inventory for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}}
[ 520.139849] env[61855]: DEBUG nova.compute.provider_tree [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Updating resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad generation from 0 to 1 during operation: update_inventory {{(pid=61855) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 520.139989] env[61855]: DEBUG nova.compute.provider_tree [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 520.200042] env[61855]: DEBUG nova.compute.provider_tree [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Updating resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad generation from 1 to 2 during operation: update_traits {{(pid=61855) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 520.218104] env[61855]: DEBUG nova.compute.resource_tracker [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 520.218355] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.371s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 520.218540] env[61855]: DEBUG nova.service [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Creating RPC server for service compute {{(pid=61855) start /opt/stack/nova/nova/service.py:182}}
[ 520.230797] env[61855]: DEBUG nova.service [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] Join ServiceGroup membership for this service compute {{(pid=61855) start /opt/stack/nova/nova/service.py:199}}
[ 520.230989] env[61855]: DEBUG nova.servicegroup.drivers.db [None req-ad16cf93-8384-4a0a-b442-5f0f95448350 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61855) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
[ 529.536971] env[61855]: DEBUG dbcounter [-] [61855] Writing DB stats nova_cell0:SELECT=1 {{(pid=61855) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 529.538020] env[61855]: DEBUG dbcounter [-] [61855] Writing DB stats nova_cell1:SELECT=1 {{(pid=61855) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 533.235777] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 533.247409] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Getting list of instances from cluster (obj){
[ 533.247409] env[61855]:   value = "domain-c8"
[ 533.247409] env[61855]:   _type = "ClusterComputeResource"
[ 533.247409] env[61855]: } {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 533.248538] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29668dea-0f03-4729-bed2-9778ae3bbc09 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 533.257319] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Got total of 0 instances {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 533.257547] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 533.257876] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Getting list of instances from cluster (obj){
[ 533.257876] env[61855]:   value = "domain-c8"
[ 533.257876] env[61855]:   _type = "ClusterComputeResource"
[ 533.257876] env[61855]: } {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 533.258752] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb375e6e-2684-41ee-9709-58cc3ca89f03 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 533.266218] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Got total of 0 instances {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 564.413583] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquiring lock "f8c0e060-db09-4279-b39b-42549aa0614d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 564.413583] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Lock "f8c0e060-db09-4279-b39b-42549aa0614d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.441306] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 564.597485] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 564.597485] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.600127] env[61855]: INFO nova.compute.claims [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 564.757697] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceb8327-c435-4869-bfe6-7dbad65243ac {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 564.769856] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7ba109-edc6-4975-bb0c-b3872b93ea81 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 564.813364] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93ba1c9-5363-490c-8824-2047c8810336 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 564.822958] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4008247-7afe-41f5-8484-1fc3eeb668b7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 564.839104] env[61855]: DEBUG nova.compute.provider_tree [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 564.850786] env[61855]: DEBUG nova.scheduler.client.report [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 564.872225] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 564.872803] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 564.962267] env[61855]: DEBUG nova.compute.utils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 564.963642] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 564.963898] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 565.009815] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 565.155961] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 565.266332] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquiring lock "7b033766-5b47-417a-9d90-35b5657ceaef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 565.266625] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Lock "7b033766-5b47-417a-9d90-35b5657ceaef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 565.284629] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 565.395875] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 565.395875] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 565.404238] env[61855]: INFO nova.compute.claims [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 565.481800] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 565.482116] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 565.482452] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 565.482452] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 565.482556] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 565.482627] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 565.482864] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 565.483046] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 565.483513] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 565.483691] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 565.483866] env[61855]: DEBUG nova.virt.hardware [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 565.488085] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47d0555-95b8-4bf5-ad8f-f465c76d2f77 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.499739] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b50b0c-78c9-4288-8bd5-dddeabcbd0f1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.520946] env[61855]: DEBUG nova.policy [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64cb572e9a4043d7bee8bf107e7864ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4760b5677d164d468aadf03daa36a1e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}}
[ 565.548339] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29677368-4e0f-458c-b061-0c839d1e8e7a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.587939] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51db4ce-ef39-4ca4-91ff-58ff9918b7f0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.603611] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41ae0db-b424-4e0d-b80e-3fb7bc71a65d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.643886] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9136f6-75d2-4bef-8d52-08fb23bec39e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.654876] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a77590-9299-40c9-ad38-a71643298f1b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.674341] env[61855]: DEBUG nova.compute.provider_tree [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 565.685278] env[61855]: DEBUG nova.scheduler.client.report [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 565.729185] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.334s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 565.729812] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 565.837631] env[61855]: DEBUG nova.compute.utils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 565.842155] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Not allocating networking since 'none' was specified. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}}
[ 565.857966] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 565.935880] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquiring lock "0cacf291-e966-4c3e-8cf7-f664295c2f91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 565.938568] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "0cacf291-e966-4c3e-8cf7-f664295c2f91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 565.969674] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 566.001091] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 566.045252] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 566.046511] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 566.046511] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 566.046511] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 566.047366] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 566.047583] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 566.047822] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 566.048405] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 566.048596] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 566.048790] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 566.049030] env[61855]: DEBUG nova.virt.hardware [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 566.049892] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e1e847-eb8d-4daf-9cc7-67215a12bdd5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.069681] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7882c51-1e0b-4fe5-b6b4-bbb4a1092cbb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.078187] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 566.078461] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 566.080068] env[61855]: INFO nova.compute.claims [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 566.094607] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Instance VIF info [] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 566.102299] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 566.102912] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10fc3578-1bd7-4133-ac71-ed9a30066a96 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.116729] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Created folder: OpenStack in parent group-v4.
[ 566.117790] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Creating folder: Project (107b0d6864cc4197a8d896551956d0e4). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 566.117790] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e31945bc-6f3f-4bf3-8ce9-b2b179ce1837 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.128078] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Created folder: Project (107b0d6864cc4197a8d896551956d0e4) in parent group-v847048.
[ 566.128161] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Creating folder: Instances. Parent ref: group-v847049. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 566.128348] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5eabbbae-3fa7-4e81-b62b-bcf7ffab4cf8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.145321] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Created folder: Instances in parent group-v847049.
[ 566.145321] env[61855]: DEBUG oslo.service.loopingcall [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 566.145321] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 566.145321] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2094fc9-2511-42d9-9c46-ae7df7d7974b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.168454] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 566.168454] env[61855]:   value = "task-4302769"
[ 566.168454] env[61855]:   _type = "Task"
[ 566.168454] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 566.179758] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302769, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 566.324789] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a2aefa-973d-47ab-a81e-646395ecbb7d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.335366] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5a7aa2-c993-45e1-a86a-60b81d21d1fb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.344090] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "d0f9134e-2797-4265-86d8-d68b5d3beb7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 566.344090] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "d0f9134e-2797-4265-86d8-d68b5d3beb7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 566.378816] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a01044-e3be-40a8-9f0c-bdc1d50f29e5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.382708] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 566.399761] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39079aef-32a5-49dc-85e0-7a82b5c6bda3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.417717] env[61855]: DEBUG nova.compute.provider_tree [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 566.431888] env[61855]: DEBUG nova.scheduler.client.report [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 566.451271] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.372s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 566.451913] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 566.459023] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 566.459252] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 566.461141] env[61855]: INFO nova.compute.claims [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 566.498291] env[61855]: DEBUG nova.compute.utils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 566.500596] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 566.500695] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 566.518556] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 566.633527] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce232a32-dacb-4564-bda8-2d7ba4b24363 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.642427] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f3cf46-9167-4b09-aace-a4ccfdbdeb93 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 566.646536] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Start spawning the instance on the hypervisor.
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 566.682166] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65dfa194-7490-45ae-b913-0692e27fddba {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.695328] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 566.695591] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 566.695747] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 566.695957] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 566.696125] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 566.696275] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 566.696600] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 566.696665] env[61855]: DEBUG nova.virt.hardware 
[None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 566.696789] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 566.696947] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 566.697144] env[61855]: DEBUG nova.virt.hardware [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 566.697384] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302769, 'name': CreateVM_Task, 'duration_secs': 0.389927} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.698155] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5923e0-1a24-4507-ba96-c5fe71070933 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.702237] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 566.703690] env[61855]: DEBUG oslo_vmware.service [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c2ff3f-08a8-49d2-b6d7-82663e762709 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.712337] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204b1956-c2d0-47ad-a91b-fee7ed20ac9d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.721908] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1489a46-1c46-4923-87d4-05b1565f1a85 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.727408] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.727491] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec 
tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.728475] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 566.737471] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-274ef2d6-0d3e-43d7-b22b-9b31302e6e1a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.750165] env[61855]: DEBUG nova.compute.provider_tree [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.756837] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Waiting for the task: (returnval){ [ 566.756837] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52762855-b2f4-e6aa-18ab-959173d6fdbe" [ 566.756837] env[61855]: _type = "Task" [ 566.756837] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.768237] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.768617] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 566.768827] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.768914] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
566.769451] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 566.769595] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79b7d57a-5d5b-431e-80ed-5e984b2a970f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.773055] env[61855]: DEBUG nova.scheduler.client.report [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.783375] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 566.783375] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 566.783517] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c1abcb-1576-47d4-a438-7aa2d75f3699 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.795445] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4352699-9985-494c-b5a1-90fd7a17068e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.800537] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Waiting for the task: (returnval){ [ 566.800537] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]525c357a-3e29-c989-4cdd-42678ca9036d" [ 566.800537] env[61855]: _type = "Task" [ 566.800537] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.811960] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]525c357a-3e29-c989-4cdd-42678ca9036d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.819676] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.360s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.820597] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 566.867471] env[61855]: DEBUG nova.policy [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed2c2063794e4396bc2286fa162232bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '84ede3b346ce41ba978fe672e2bb24fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 566.903685] env[61855]: DEBUG nova.compute.utils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 566.905687] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 566.905687] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 566.925098] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 567.074450] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 567.119021] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.119021] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.119917] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.119917] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.120229] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.120453] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.120610] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.120770] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.120932] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c 
tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.121110] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.121289] env[61855]: DEBUG nova.virt.hardware [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.122211] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a4c555-fce0-4684-9eef-36dbb7222a9a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.133271] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da745f8b-9928-465c-af6d-c8979c69dce3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.314059] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 567.314059] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Creating directory with path [datastore2] vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 567.314307] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-219c0ebf-4e21-4d81-8afd-23eef550720a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.336041] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Created directory with path [datastore2] vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 567.336140] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Fetch image to [datastore2] vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 567.337161] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 
7b033766-5b47-417a-9d90-35b5657ceaef] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 567.337161] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5710a9d-fd86-4788-abd8-a6b2309490ab {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.348932] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee33564-8e76-4428-8788-86e618ef530c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.362079] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18506be8-f8e6-41fc-9f4b-1c9f0dd74e0e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.397973] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5fe6a0-a1a1-4518-81b0-edae7f71a5f1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.404373] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cf049dbd-4a84-47c6-9bdf-cb60f45dce98 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.433027] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 567.522115] env[61855]: DEBUG oslo_vmware.rw_handles [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 567.589705] env[61855]: DEBUG nova.policy [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa4687cadf74da8879e45180bc53075', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '393b6bf5812d452485a233ff672fbf01', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 567.593303] env[61855]: DEBUG oslo_vmware.rw_handles [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 567.593514] env[61855]: DEBUG oslo_vmware.rw_handles [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 568.163342] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Successfully created port: cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 568.993438] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Successfully created port: 7f4acab6-b698-4321-b9d9-86d6154c8be0 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.001972] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.002219] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.014774] env[61855]: DEBUG nova.compute.manager [None 
req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 569.091183] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.091457] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.094608] env[61855]: INFO nova.compute.claims [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.255357] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d2e266-a4d0-4b96-977f-1dead3dbf857 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.266078] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494c346d-9527-462a-b84a-4275b4b4c36f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.301054] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bb4ab5-5d66-4ad5-b76e-62f6a3873182 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.309391] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe850e0-5211-46ce-b7b3-a077acae6da4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.325999] env[61855]: DEBUG nova.compute.provider_tree [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.336197] env[61855]: DEBUG nova.scheduler.client.report [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 
107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.367963] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.368527] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 569.439265] env[61855]: DEBUG nova.compute.utils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.439844] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 569.440412] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 569.450852] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Successfully created port: a93f63f8-15f8-4a11-b810-93697ef84d11 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.470778] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 569.580678] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 569.614168] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.614391] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.614551] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.614736] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.614884] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.616176] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.616484] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.616681] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.616862] env[61855]: DEBUG 
nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.617042] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.617226] env[61855]: DEBUG nova.virt.hardware [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.618178] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7203f9-e56b-4f94-af50-748f1966dce8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.630321] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fee834-fd2b-4afb-8edd-19d715ad2523 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.761338] env[61855]: DEBUG nova.policy [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df36ed0a396543e291b9a816b2976142', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2877582250f24f809aa7dadbc453b849', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 569.944444] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "6e0adc99-63ad-4cca-b300-d67dc2928324" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.944677] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.984284] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 570.102463] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.102463] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.102463] env[61855]: INFO nova.compute.claims [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.378539] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ecf86a-f0e5-4002-9161-061c1245084d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.385036] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb272049-dbdd-49d2-aa06-4a53f3dfdc25 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.428046] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7225ddc9-4580-4dca-9708-05d19e1e22b8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.436337] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baf331d-35b5-4d4c-8d56-50322ef32033 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.455138] env[61855]: DEBUG nova.compute.provider_tree [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.470936] env[61855]: DEBUG nova.scheduler.client.report [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 570.498745] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.398s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.500422] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 570.562965] env[61855]: DEBUG nova.compute.utils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 570.568639] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 570.568639] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 570.581647] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 570.697887] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 570.730860] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 570.731150] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 570.731308] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.731490] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 570.731634] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 570.731778] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 570.731980] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 570.733753] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 570.734027] env[61855]: DEBUG nova.virt.hardware [None 
req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 570.734185] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 570.735279] env[61855]: DEBUG nova.virt.hardware [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 570.735279] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785ebfc1-e385-412c-8e4a-192134b7aa40 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.750571] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415b4196-a418-4bee-91a3-8a4fe1aa848f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.875779] env[61855]: DEBUG nova.policy [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea0722db5b2a4249ac845eca5bf6d2fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8570e4a78e8d41f4837f4965b1bb891b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 571.213400] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Successfully created port: adb272c1-e20b-4623-a13f-12aa4eafa8c2 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.368510] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Successfully updated port: cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.396037] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquiring lock "refresh_cache-f8c0e060-db09-4279-b39b-42549aa0614d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.396037] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 
tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquired lock "refresh_cache-f8c0e060-db09-4279-b39b-42549aa0614d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.396037] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.566299] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Successfully updated port: 7f4acab6-b698-4321-b9d9-86d6154c8be0 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.574759] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.584606] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquiring lock "refresh_cache-0cacf291-e966-4c3e-8cf7-f664295c2f91" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.584752] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquired lock "refresh_cache-0cacf291-e966-4c3e-8cf7-f664295c2f91" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.585053] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.907661] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.059611] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Successfully created port: e74a738b-e127-4742-bb96-e83698e75141 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.401502] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Successfully updated port: a93f63f8-15f8-4a11-b810-93697ef84d11 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 573.420488] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "refresh_cache-d0f9134e-2797-4265-86d8-d68b5d3beb7a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.420488] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired lock "refresh_cache-d0f9134e-2797-4265-86d8-d68b5d3beb7a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.420488] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 573.461198] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Updating instance_info_cache with network_info: [{"id": "cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea", "address": "fa:16:3e:21:a5:95", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcee0f1ca-c0", "ovs_interfaceid": "cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.484890] env[61855]: DEBUG 
oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Releasing lock "refresh_cache-f8c0e060-db09-4279-b39b-42549aa0614d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.484890] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Instance network_info: |[{"id": "cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea", "address": "fa:16:3e:21:a5:95", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcee0f1ca-c0", "ovs_interfaceid": "cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 573.485227] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:a5:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 573.511028] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Creating folder: Project (4760b5677d164d468aadf03daa36a1e6). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 573.512302] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6303cb15-de65-4133-81ba-8dc461294c6a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.526038] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Created folder: Project (4760b5677d164d468aadf03daa36a1e6) in parent group-v847048. 
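The spawn flow above follows one pattern throughout: a lock string derived from the resource (here "refresh_cache-<instance uuid>") guards a read-modify-write on shared state, and the Acquiring/Acquired/Releasing triple in the trace is oslo.concurrency's own logging of that lock's lifetime. A minimal sketch of the pattern, assuming a hypothetical fetch_nw_info() helper and an in-memory dict standing in for Nova's instance info cache:

    from oslo_concurrency import lockutils

    _nw_info_cache = {}  # hypothetical stand-in for Nova's instance info cache

    def refresh_nw_cache(instance_uuid, fetch_nw_info):
        # Mirrors the Acquiring/Acquired/Releasing records above: the named
        # lock "refresh_cache-<uuid>" serializes rebuilds per instance, and
        # the context manager guarantees the release even if the fetch raises.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            nw_info = fetch_nw_info(instance_uuid)  # e.g. query Neutron for ports
            _nw_info_cache[instance_uuid] = nw_info
        return nw_info

The same module also backs the cross-process variant; the "Acquired external semaphore" records around the datastore image cache come from its external-lock path rather than the plain thread lock shown here.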
[ 573.526038] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Creating folder: Instances. Parent ref: group-v847052. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 573.526038] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d09b6f8-6d57-443e-93c5-c1213079ee45 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.539494] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Created folder: Instances in parent group-v847052. [ 573.539760] env[61855]: DEBUG oslo.service.loopingcall [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 573.539953] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 573.540183] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02ffbc6e-5b80-4192-85f7-3fef28e21f55 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.563198] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 573.563198] env[61855]: value = "task-4302772" [ 573.563198] env[61855]: _type = "Task" [ 573.563198] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.573752] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302772, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.849202] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.866143] env[61855]: DEBUG nova.compute.manager [req-e2ce0d6f-2bfc-481b-b63d-3c25d4ec92e5 req-60da5d7d-5909-4dc8-a5db-ee7ce41a8176 service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Received event network-vif-plugged-cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 573.866143] env[61855]: DEBUG oslo_concurrency.lockutils [req-e2ce0d6f-2bfc-481b-b63d-3c25d4ec92e5 req-60da5d7d-5909-4dc8-a5db-ee7ce41a8176 service nova] Acquiring lock "f8c0e060-db09-4279-b39b-42549aa0614d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.866143] env[61855]: DEBUG oslo_concurrency.lockutils [req-e2ce0d6f-2bfc-481b-b63d-3c25d4ec92e5 req-60da5d7d-5909-4dc8-a5db-ee7ce41a8176 service nova] Lock "f8c0e060-db09-4279-b39b-42549aa0614d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.866143] env[61855]: DEBUG oslo_concurrency.lockutils [req-e2ce0d6f-2bfc-481b-b63d-3c25d4ec92e5 req-60da5d7d-5909-4dc8-a5db-ee7ce41a8176 service nova] Lock "f8c0e060-db09-4279-b39b-42549aa0614d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.866807] env[61855]: DEBUG nova.compute.manager [req-e2ce0d6f-2bfc-481b-b63d-3c25d4ec92e5 req-60da5d7d-5909-4dc8-a5db-ee7ce41a8176 service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] No waiting events found dispatching network-vif-plugged-cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 573.866807] env[61855]: WARNING nova.compute.manager [req-e2ce0d6f-2bfc-481b-b63d-3c25d4ec92e5 req-60da5d7d-5909-4dc8-a5db-ee7ce41a8176 service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Received unexpected event network-vif-plugged-cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea for instance with vm_state building and task_state spawning. 
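The three records above trace the external-event handshake: on receiving network-vif-plugged, the manager takes the per-instance "-events" lock, tries to pop a registered waiter for that event, and, finding none ("No waiting events found"), logs the event as unexpected because the instance is still building. A self-contained sketch of that prepare/dispatch handshake, using threading.Event in place of Nova's actual event objects:

    import logging
    import threading

    LOG = logging.getLogger(__name__)
    _waiters = {}   # (instance_uuid, event_name) -> threading.Event
    _mutex = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        # Called *before* the operation that triggers the event (the VIF
        # plug), so a later dispatch finds a waiter to wake.
        ev = threading.Event()
        with _mutex:
            _waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch_event(instance_uuid, event_name):
        # Called from the Neutron-to-Nova callback ("Received event ..." above).
        with _mutex:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Nobody registered: the situation behind the WARNING record above.
            LOG.warning('Received unexpected event %s for instance %s',
                        event_name, instance_uuid)
        else:
            ev.set()  # unblocks a waiter sitting in ev.wait(timeout)

In the trace the warning is benign: the port was plugged before the guest spawn reached the point where it registers a waiter, so the event simply arrives early.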
[ 573.942815] env[61855]: DEBUG nova.compute.manager [req-2a8b3200-71ce-4fe3-a85b-c8f25e19f7d8 req-8111de32-4855-436c-abe4-515ab436e68b service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Received event network-vif-plugged-7f4acab6-b698-4321-b9d9-86d6154c8be0 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 573.942815] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a8b3200-71ce-4fe3-a85b-c8f25e19f7d8 req-8111de32-4855-436c-abe4-515ab436e68b service nova] Acquiring lock "0cacf291-e966-4c3e-8cf7-f664295c2f91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.942815] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a8b3200-71ce-4fe3-a85b-c8f25e19f7d8 req-8111de32-4855-436c-abe4-515ab436e68b service nova] Lock "0cacf291-e966-4c3e-8cf7-f664295c2f91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.942815] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a8b3200-71ce-4fe3-a85b-c8f25e19f7d8 req-8111de32-4855-436c-abe4-515ab436e68b service nova] Lock "0cacf291-e966-4c3e-8cf7-f664295c2f91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.943148] env[61855]: DEBUG nova.compute.manager [req-2a8b3200-71ce-4fe3-a85b-c8f25e19f7d8 req-8111de32-4855-436c-abe4-515ab436e68b service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] No waiting events found dispatching network-vif-plugged-7f4acab6-b698-4321-b9d9-86d6154c8be0 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 573.943148] env[61855]: WARNING nova.compute.manager [req-2a8b3200-71ce-4fe3-a85b-c8f25e19f7d8 req-8111de32-4855-436c-abe4-515ab436e68b service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Received unexpected event network-vif-plugged-7f4acab6-b698-4321-b9d9-86d6154c8be0 for instance with vm_state building and task_state spawning. [ 574.075518] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302772, 'name': CreateVM_Task, 'duration_secs': 0.45944} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.075518] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 574.100614] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.100614] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.100893] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 574.101195] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98a25dc7-e037-4ccd-9748-b179a4f50741 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.106659] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Waiting for the task: (returnval){ [ 574.106659] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5264cfc3-709c-8fc9-0037-c7596a2bab0f" [ 574.106659] env[61855]: _type = "Task" [ 574.106659] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.118335] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5264cfc3-709c-8fc9-0037-c7596a2bab0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.618161] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.619282] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 574.619374] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.742402] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Updating instance_info_cache with network_info: [{"id": "7f4acab6-b698-4321-b9d9-86d6154c8be0", "address": "fa:16:3e:50:82:63", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f4acab6-b6", "ovs_interfaceid": "7f4acab6-b698-4321-b9d9-86d6154c8be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.773671] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Releasing lock "refresh_cache-0cacf291-e966-4c3e-8cf7-f664295c2f91" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.773963] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Instance network_info: 
|[{"id": "7f4acab6-b698-4321-b9d9-86d6154c8be0", "address": "fa:16:3e:50:82:63", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f4acab6-b6", "ovs_interfaceid": "7f4acab6-b698-4321-b9d9-86d6154c8be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 574.774380] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:82:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f4acab6-b698-4321-b9d9-86d6154c8be0', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.782419] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Creating folder: Project (84ede3b346ce41ba978fe672e2bb24fd). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.783017] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b87c27c1-7590-452d-9cec-9c9fe656b9db {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.795338] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Created folder: Project (84ede3b346ce41ba978fe672e2bb24fd) in parent group-v847048. [ 574.795525] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Creating folder: Instances. Parent ref: group-v847055. 
{{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.795787] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea480bb7-6452-444e-b3f1-0a815788fb67 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.804753] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Created folder: Instances in parent group-v847055. [ 574.804980] env[61855]: DEBUG oslo.service.loopingcall [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.805185] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 574.805407] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f11b05f0-f9ef-4d15-b7a2-ee7096649f86 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.827149] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.827149] env[61855]: value = "task-4302775" [ 574.827149] env[61855]: _type = "Task" [ 574.827149] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.836176] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302775, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.337223] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.337483] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.346228] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302775, 'name': CreateVM_Task, 'duration_secs': 0.334864} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.346228] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 575.346881] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.347210] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.347573] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 575.347945] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d0499f1-2d27-48c6-9465-d2b4fbd7ac94 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.353435] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Waiting for the task: (returnval){ [ 575.353435] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]521a83a6-6917-efcc-aa5d-4e5abad1dc88" [ 575.353435] env[61855]: _type = "Task" [ 575.353435] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.354682] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 575.366559] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]521a83a6-6917-efcc-aa5d-4e5abad1dc88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.401439] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Updating instance_info_cache with network_info: [{"id": "a93f63f8-15f8-4a11-b810-93697ef84d11", "address": "fa:16:3e:71:1b:20", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa93f63f8-15", "ovs_interfaceid": "a93f63f8-15f8-4a11-b810-93697ef84d11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.430678] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Releasing lock "refresh_cache-d0f9134e-2797-4265-86d8-d68b5d3beb7a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.431373] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Instance network_info: |[{"id": "a93f63f8-15f8-4a11-b810-93697ef84d11", "address": "fa:16:3e:71:1b:20", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa93f63f8-15", "ovs_interfaceid": "a93f63f8-15f8-4a11-b810-93697ef84d11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 575.435391] env[61855]: DEBUG nova.virt.vmwareapi.vmops 
[None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:1b:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a93f63f8-15f8-4a11-b810-93697ef84d11', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 575.442868] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Creating folder: Project (393b6bf5812d452485a233ff672fbf01). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 575.444234] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55db8071-6f5d-4583-a825-3a6ac5205848 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.462031] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Created folder: Project (393b6bf5812d452485a233ff672fbf01) in parent group-v847048. [ 575.462031] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Creating folder: Instances. Parent ref: group-v847058. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 575.462031] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dde7b50d-0c32-48dd-9eb7-5136d4e6f298 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.473757] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Created folder: Instances in parent group-v847058. [ 575.473757] env[61855]: DEBUG oslo.service.loopingcall [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.473882] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 575.474354] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89975030-f39d-4859-849f-27ad0db0db59 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.493213] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.493213] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.495042] env[61855]: INFO nova.compute.claims [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 575.502522] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 575.502522] env[61855]: value = "task-4302778" [ 575.502522] env[61855]: _type = "Task" [ 575.502522] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.513026] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302778, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.728951] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98767a6e-0634-48ee-9015-d689fd66f1b5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.741033] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5811be8f-9937-4411-84b1-c6f49b10213e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.780881] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2daaff38-9b11-4580-9492-e691264fa534 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.789553] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e741acc-168c-4e5a-8570-4172bf51ec2c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.806224] env[61855]: DEBUG nova.compute.provider_tree [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.819694] env[61855]: DEBUG nova.scheduler.client.report [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 575.835617] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.344s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.836199] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 575.871889] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.872198] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 575.872595] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.922758] env[61855]: DEBUG nova.compute.utils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.926202] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Allocating IP information in the background. 
{{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 575.926746] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 575.934128] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.934128] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.934313] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 575.934630] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 575.940750] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 575.960036] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 575.960036] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 575.960036] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 575.960036] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 575.960036] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 575.960378] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 575.960378] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 575.960378] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 575.964019] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.964019] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.964019] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.964019] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.964019] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.964019] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.964370] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 575.964370] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.979884] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.979964] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.980201] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.980357] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 575.981585] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2e7902-b278-4783-a43a-750764f1d72d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.992244] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f6db1e-411f-47a3-9774-e4233a8992f9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.014244] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232888b3-fc46-4171-810b-2278a261084e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.027032] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302778, 'name': CreateVM_Task, 'duration_secs': 0.492798} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.029079] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 576.029773] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.029940] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.030881] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 576.032226] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa960c3-76e5-44fc-b8e3-600e0475e79c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.037127] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c9deb3c-51ed-4f37-99b6-227ebbd11fc7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.073716] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180669MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 576.073805] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.073971] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.075989] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for the task: (returnval){ [ 576.075989] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]523eaddc-a82d-7ccf-f60c-3d1dd3433f3d" [ 576.075989] env[61855]: _type = "Task" [ 576.075989] env[61855]: } to complete. 
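The "(returnval){ value = ... _type = Task } to complete" block above is oslo.vmware waiting on a vCenter Task managed object. A sketch of that API, assuming oslo.vmware is available; the host and credentials below are placeholders, since Nova takes the real values from the [vmware] section of nova.conf:

    from oslo_vmware import api

    def make_session():
        # Placeholder endpoint/credentials, for illustration only.
        return api.VMwareAPISession(
            'vc1.example.com', 'administrator', 'secret',
            api_retry_count=10, task_poll_interval=0.5)

    def wait_on_task(session, task_ref):
        # Polls the Task managed object (the "progress is 0%" entries
        # elsewhere in this log) until it reaches 'success' and returns
        # its task_info, or raises if the task ends in 'error'.
        return session.wait_for_task(task_ref)
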
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.090531] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.090531] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 576.090766] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.093574] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 576.143634] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 576.143866] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 576.144058] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.144904] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 
tempest-ServersAdminTestJSON-1453767424-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 576.144904] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.144904] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 576.144904] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 576.145098] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 576.145221] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 576.145369] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 576.145530] env[61855]: DEBUG nova.virt.hardware [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 576.146776] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc874335-3f23-4657-98ba-66ab13f2612e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.159416] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7bed30-1c75-4ce4-a1fb-a75f643c1bad {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.186929] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f8c0e060-db09-4279-b39b-42549aa0614d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
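The topology walk above (limits 65536:65536:65536, one possible topology, sorted result 1:1:1) enumerates every sockets/cores/threads split whose product equals the vCPU count. A simplified re-creation of that enumeration, not Nova's exact code:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every (sockets, cores, threads) factorization of the vCPU
        # count that fits the flavor/image limits; for 1 vCPU the only
        # split is 1:1:1, matching "Got 1 possible topologies" above.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
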
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 576.187143] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 7b033766-5b47-417a-9d90-35b5657ceaef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 576.187282] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0cacf291-e966-4c3e-8cf7-f664295c2f91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 576.187423] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d0f9134e-2797-4265-86d8-d68b5d3beb7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 576.187544] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance fa375b5c-bf96-4f57-a97c-73ef45bbef75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 576.187661] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e0adc99-63ad-4cca-b300-d67dc2928324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 576.187776] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
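The seven per-instance allocations listed above, combined with the 512 MB of reserved host memory visible in the nearby inventory entries, reproduce the numbers in the "Final resource view" entry that follows; the arithmetic appears to be:

    # Seven instances, each with the allocation logged above.
    per_instance = {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}
    count = 7
    reserved_host_memory_mb = 512  # matches MEMORY_MB 'reserved' in inventory

    used_vcpus = count * per_instance['VCPU']                               # 7
    used_ram = count * per_instance['MEMORY_MB'] + reserved_host_memory_mb  # 1408
    used_disk = count * per_instance['DISK_GB']                             # 7

    print(used_vcpus, used_ram, used_disk)  # 7 1408 7, as in the final view
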
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 576.188014] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 576.188190] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=110GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 576.192564] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Successfully updated port: adb272c1-e20b-4623-a13f-12aa4eafa8c2 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 576.219928] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "refresh_cache-fa375b5c-bf96-4f57-a97c-73ef45bbef75" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.219928] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquired lock "refresh_cache-fa375b5c-bf96-4f57-a97c-73ef45bbef75" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.219928] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 576.372284] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695e69a9-a472-4dec-a9b0-bb49621fce0c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.382934] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dac445d-7048-445b-81d7-54fc6837eeca {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.393051] env[61855]: DEBUG nova.policy [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c3adb89e5434731907e149ec47c796b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed74679ad9b8430e8e1591ef5e75b898', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize 
/opt/stack/nova/nova/policy.py:203}} [ 576.423637] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f73f66-8c86-400f-bd48-be18078fc6d9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.435020] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b3c477-1011-4fe5-8fd3-838d3a882d6a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.451961] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.472895] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.503437] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 576.503657] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.430s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.591250] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Instance cache missing network info. 
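Given the inventory reported above, placement's schedulable capacity per resource class is (total - reserved) * allocation_ratio; a quick check of what those figures imply:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 210.0
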
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.905824] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Updating instance_info_cache with network_info: [{"id": "adb272c1-e20b-4623-a13f-12aa4eafa8c2", "address": "fa:16:3e:7d:11:22", "network": {"id": "08651dec-a87e-414b-b005-555bef8a06b8", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1003918394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2877582250f24f809aa7dadbc453b849", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb272c1-e2", "ovs_interfaceid": "adb272c1-e20b-4623-a13f-12aa4eafa8c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.933953] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Releasing lock "refresh_cache-fa375b5c-bf96-4f57-a97c-73ef45bbef75" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.934681] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Instance network_info: |[{"id": "adb272c1-e20b-4623-a13f-12aa4eafa8c2", "address": "fa:16:3e:7d:11:22", "network": {"id": "08651dec-a87e-414b-b005-555bef8a06b8", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1003918394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2877582250f24f809aa7dadbc453b849", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb272c1-e2", "ovs_interfaceid": "adb272c1-e20b-4623-a13f-12aa4eafa8c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 577.934810] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:11:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'adb272c1-e20b-4623-a13f-12aa4eafa8c2', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 577.945770] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Creating folder: Project (2877582250f24f809aa7dadbc453b849). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 577.945770] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51ed0451-a167-4ee5-9c4f-aea8d47de910 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.955859] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Created folder: Project (2877582250f24f809aa7dadbc453b849) in parent group-v847048. [ 577.955859] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Creating folder: Instances. Parent ref: group-v847061. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 577.955993] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa329665-6588-4f86-9897-6200810cbdd3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.966370] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Created folder: Instances in parent group-v847061. [ 577.966620] env[61855]: DEBUG oslo.service.loopingcall [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
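The "Instance VIF info" entry above is a straight projection of Neutron's network_info cache (the JSON blob a few entries earlier) into the handful of fields the VMware driver needs. A simplified re-creation of that mapping, not Nova's exact code:

    def vif_info_from_network_info(network_info):
        # For each Neutron VIF, keep the bridge name, MAC, port id, and
        # the NSX logical-switch id that becomes the OpaqueNetwork ref.
        return [{
            'network_name': vif['network']['bridge'],   # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        } for vif in network_info]
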
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.966804] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 577.967018] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e6011d3e-973a-49ad-8290-8978b7fe7fc4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.993169] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 577.993169] env[61855]: value = "task-4302781" [ 577.993169] env[61855]: _type = "Task" [ 577.993169] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.006507] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302781, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.034820] env[61855]: DEBUG nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Received event network-vif-plugged-a93f63f8-15f8-4a11-b810-93697ef84d11 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 578.035088] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Acquiring lock "d0f9134e-2797-4265-86d8-d68b5d3beb7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.035325] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Lock "d0f9134e-2797-4265-86d8-d68b5d3beb7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.035546] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Lock "d0f9134e-2797-4265-86d8-d68b5d3beb7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.036441] env[61855]: DEBUG nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] No waiting events found dispatching network-vif-plugged-a93f63f8-15f8-4a11-b810-93697ef84d11 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 578.036441] env[61855]: WARNING nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Received unexpected event network-vif-plugged-a93f63f8-15f8-4a11-b810-93697ef84d11 for instance with vm_state building and task_state spawning. 
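The "No waiting events found dispatching network-vif-plugged-..." / "Received unexpected event" pair above means Neutron's event arrived before any spawning thread had registered to wait for it. A simplified analogue of Nova's InstanceEvents registry, with threading standing in for Nova's eventlet primitives:

    import collections
    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = collections.defaultdict(dict)
            self._lock = threading.Lock()  # the "<uuid>-events" lock above

        def prepare(self, instance_uuid, name):
            # A spawning thread registers the event it expects, then waits.
            ev = threading.Event()
            with self._lock:
                self._events[instance_uuid][name] = ev
            return ev

        def pop(self, instance_uuid, name):
            # The external-event handler pops and signals the waiter; a
            # None result here is what produces the "unexpected event"
            # WARNING seen above.
            with self._lock:
                return self._events[instance_uuid].pop(name, None)
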
[ 578.036441] env[61855]: DEBUG nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Received event network-changed-7f4acab6-b698-4321-b9d9-86d6154c8be0 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 578.036441] env[61855]: DEBUG nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Refreshing instance network info cache due to event network-changed-7f4acab6-b698-4321-b9d9-86d6154c8be0. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 578.036441] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Acquiring lock "refresh_cache-0cacf291-e966-4c3e-8cf7-f664295c2f91" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.036651] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Acquired lock "refresh_cache-0cacf291-e966-4c3e-8cf7-f664295c2f91" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.036651] env[61855]: DEBUG nova.network.neutron [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Refreshing network info cache for port 7f4acab6-b698-4321-b9d9-86d6154c8be0 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 578.101515] env[61855]: DEBUG nova.compute.manager [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Received event network-changed-cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 578.101607] env[61855]: DEBUG nova.compute.manager [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Refreshing instance network info cache due to event network-changed-cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 578.101846] env[61855]: DEBUG oslo_concurrency.lockutils [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] Acquiring lock "refresh_cache-f8c0e060-db09-4279-b39b-42549aa0614d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.102065] env[61855]: DEBUG oslo_concurrency.lockutils [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] Acquired lock "refresh_cache-f8c0e060-db09-4279-b39b-42549aa0614d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.102116] env[61855]: DEBUG nova.network.neutron [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Refreshing network info cache for port cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 578.212146] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "e21722cc-672b-4f8a-9f78-e50ac83071a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.212665] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.227053] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 578.297509] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.297876] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.299875] env[61855]: INFO nova.compute.claims [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.512238] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302781, 'name': CreateVM_Task, 'duration_secs': 0.320979} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.516430] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Successfully updated port: e74a738b-e127-4742-bb96-e83698e75141 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 578.518950] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 578.518950] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.518950] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.519238] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 578.519537] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72eba0b-4785-4b8f-9ca5-c556a2d1d15d {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.525958] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Waiting for the task: (returnval){ [ 578.525958] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5296dced-e5de-3215-8a7e-c54f4ee0b56d" [ 578.525958] env[61855]: _type = "Task" [ 578.525958] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.533363] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d879fad-5930-435a-9c7e-46731ae30211 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.540540] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5296dced-e5de-3215-8a7e-c54f4ee0b56d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.541039] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "refresh_cache-6e0adc99-63ad-4cca-b300-d67dc2928324" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.541192] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquired lock "refresh_cache-6e0adc99-63ad-4cca-b300-d67dc2928324" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.541378] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 578.551139] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf0130a-2217-4c77-a224-8dda8fd4ae7d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.593198] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c405ae-0e37-4cb5-9a9f-1c27421e3e22 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.600622] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c15774-56d6-48bc-8b6a-2753b0ee4950 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.621196] env[61855]: DEBUG nova.compute.provider_tree [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed in ProviderTree for provider: 
a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.642333] env[61855]: DEBUG nova.scheduler.client.report [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.669953] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.372s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.670164] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 578.717349] env[61855]: DEBUG nova.compute.utils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 578.718847] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 578.718996] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 578.740943] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 578.783785] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Instance cache missing network info. 
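The "Claim successful on node ..." entry above is the resource tracker testing the new instance against free capacity while holding the compute_resources lock; in spirit (this is not Nova's actual claim code, which also honors scheduler-supplied limits):

    def can_claim(requested, free):
        # A claim succeeds only if every requested resource fits.
        return all(free.get(rc, 0) >= amount
                   for rc, amount in requested.items())

    # Free capacity implied by the inventory and usage logged above.
    free = {'VCPU': 48 * 4.0 - 7, 'MEMORY_MB': 196590 - 1408,
            'DISK_GB': 210 - 7}
    print(can_claim({'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}, free))  # True
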
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.870645] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 578.919865] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 578.919865] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 578.919865] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.921632] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 578.921632] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.922337] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 578.924765] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 578.924765] env[61855]: 
DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 578.924765] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 578.924765] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 578.924765] env[61855]: DEBUG nova.virt.hardware [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 578.925194] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84816f10-56a9-41f7-acd1-2048478434b5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.934384] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f13f7a-52e6-47ae-98e6-725fd9f899c8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.991248] env[61855]: DEBUG nova.policy [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c3adb89e5434731907e149ec47c796b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed74679ad9b8430e8e1591ef5e75b898', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 579.041495] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.041936] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.041991] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 
tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.103995] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Successfully created port: cf6f417f-7674-4783-a3f1-84a524aeda44 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 579.967431] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Updating instance_info_cache with network_info: [{"id": "e74a738b-e127-4742-bb96-e83698e75141", "address": "fa:16:3e:b6:79:d7", "network": {"id": "ef381f85-81bb-4282-9bbe-236cd54bb0be", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2095601959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8570e4a78e8d41f4837f4965b1bb891b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape74a738b-e1", "ovs_interfaceid": "e74a738b-e127-4742-bb96-e83698e75141", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.991187] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Releasing lock "refresh_cache-6e0adc99-63ad-4cca-b300-d67dc2928324" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.991520] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Instance network_info: |[{"id": "e74a738b-e127-4742-bb96-e83698e75141", "address": "fa:16:3e:b6:79:d7", "network": {"id": "ef381f85-81bb-4282-9bbe-236cd54bb0be", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2095601959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8570e4a78e8d41f4837f4965b1bb891b", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape74a738b-e1", "ovs_interfaceid": "e74a738b-e127-4742-bb96-e83698e75141", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 579.992188] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:79:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e74a738b-e127-4742-bb96-e83698e75141', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 580.005661] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Creating folder: Project (8570e4a78e8d41f4837f4965b1bb891b). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 580.005661] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd24e3da-b206-4a19-b0d2-d24bd906b6d9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.017091] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Created folder: Project (8570e4a78e8d41f4837f4965b1bb891b) in parent group-v847048. [ 580.017751] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Creating folder: Instances. Parent ref: group-v847064. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 580.017751] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82a270f5-7358-4878-8f4f-5bd2dbe00869 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.026243] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Created folder: Instances in parent group-v847064. [ 580.027058] env[61855]: DEBUG oslo.service.loopingcall [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 580.027058] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 580.027058] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91e8eb1a-c460-4f3d-a311-f70a407f8cf0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.046072] env[61855]: DEBUG nova.compute.manager [req-fd33c800-d253-4833-b980-d3b5f545ae3f req-f8452aca-ffdd-4fab-923a-68f5863fb4ac service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Received event network-vif-plugged-e74a738b-e127-4742-bb96-e83698e75141 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 580.046288] env[61855]: DEBUG oslo_concurrency.lockutils [req-fd33c800-d253-4833-b980-d3b5f545ae3f req-f8452aca-ffdd-4fab-923a-68f5863fb4ac service nova] Acquiring lock "6e0adc99-63ad-4cca-b300-d67dc2928324-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.046501] env[61855]: DEBUG oslo_concurrency.lockutils [req-fd33c800-d253-4833-b980-d3b5f545ae3f req-f8452aca-ffdd-4fab-923a-68f5863fb4ac service nova] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.046662] env[61855]: DEBUG oslo_concurrency.lockutils [req-fd33c800-d253-4833-b980-d3b5f545ae3f req-f8452aca-ffdd-4fab-923a-68f5863fb4ac service nova] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.046853] env[61855]: DEBUG nova.compute.manager [req-fd33c800-d253-4833-b980-d3b5f545ae3f req-f8452aca-ffdd-4fab-923a-68f5863fb4ac service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] No waiting events found dispatching network-vif-plugged-e74a738b-e127-4742-bb96-e83698e75141 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 580.048307] env[61855]: WARNING nova.compute.manager [req-fd33c800-d253-4833-b980-d3b5f545ae3f req-f8452aca-ffdd-4fab-923a-68f5863fb4ac service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Received unexpected event network-vif-plugged-e74a738b-e127-4742-bb96-e83698e75141 for instance with vm_state building and task_state spawning. [ 580.053948] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 580.053948] env[61855]: value = "task-4302784" [ 580.053948] env[61855]: _type = "Task" [ 580.053948] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.066278] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302784, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.177392] env[61855]: DEBUG nova.network.neutron [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Updated VIF entry in instance network info cache for port cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 580.177445] env[61855]: DEBUG nova.network.neutron [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Updating instance_info_cache with network_info: [{"id": "cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea", "address": "fa:16:3e:21:a5:95", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcee0f1ca-c0", "ovs_interfaceid": "cee0f1ca-c0e7-4b0d-bb22-bdc50dddc3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.200392] env[61855]: DEBUG oslo_concurrency.lockutils [req-3f600885-96bf-43ad-a74e-544bb5e72ede req-fac070ab-af5a-4eff-830f-ae2376593adc service nova] Releasing lock "refresh_cache-f8c0e060-db09-4279-b39b-42549aa0614d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.228765] env[61855]: DEBUG nova.network.neutron [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Updated VIF entry in instance network info cache for port 7f4acab6-b698-4321-b9d9-86d6154c8be0. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 580.230559] env[61855]: DEBUG nova.network.neutron [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Updating instance_info_cache with network_info: [{"id": "7f4acab6-b698-4321-b9d9-86d6154c8be0", "address": "fa:16:3e:50:82:63", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f4acab6-b6", "ovs_interfaceid": "7f4acab6-b698-4321-b9d9-86d6154c8be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.242639] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Releasing lock "refresh_cache-0cacf291-e966-4c3e-8cf7-f664295c2f91" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.242755] env[61855]: DEBUG nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Received event network-changed-a93f63f8-15f8-4a11-b810-93697ef84d11 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 580.243364] env[61855]: DEBUG nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Refreshing instance network info cache due to event network-changed-a93f63f8-15f8-4a11-b810-93697ef84d11. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 580.243364] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Acquiring lock "refresh_cache-d0f9134e-2797-4265-86d8-d68b5d3beb7a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.243612] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Acquired lock "refresh_cache-d0f9134e-2797-4265-86d8-d68b5d3beb7a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.243744] env[61855]: DEBUG nova.network.neutron [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Refreshing network info cache for port a93f63f8-15f8-4a11-b810-93697ef84d11 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 580.566381] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302784, 'name': CreateVM_Task} progress is 99%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.994791] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Successfully created port: bcd260eb-75dd-410f-be40-26e5ce9b8fa1 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.074165] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302784, 'name': CreateVM_Task, 'duration_secs': 0.550438} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.074165] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 581.075805] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.080022] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.080022] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 581.080022] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6632cd87-746c-4bc4-b294-6540a97ddc67 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.085270] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Waiting for the task: (returnval){ [ 581.085270] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5257a3bb-2c52-14da-8987-5ce2edb8387a" [ 581.085270] env[61855]: _type = "Task" [ 581.085270] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.096018] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5257a3bb-2c52-14da-8987-5ce2edb8387a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.439970] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "311d492c-0093-4d64-a56f-80fce95b809a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.441638] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "311d492c-0093-4d64-a56f-80fce95b809a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.482383] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 581.565299] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.565636] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.569185] env[61855]: INFO nova.compute.claims [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.597987] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.598662] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 581.598662] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.814420] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb02d6b3-0b52-4680-8b19-a3f92ccbb35d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.824588] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9571d997-bf64-420b-9d03-d8a21f756315 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.870021] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8598759-989d-4ecc-85c2-a54bf8b37559 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.877565] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da850291-2e13-4100-b2b6-6180412ea990 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.894565] env[61855]: DEBUG nova.compute.provider_tree [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.910611] env[61855]: DEBUG nova.scheduler.client.report [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 581.937781] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.372s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.938550] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 582.000448] env[61855]: DEBUG nova.compute.utils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.001792] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 582.001968] env[61855]: DEBUG nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 582.017475] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 582.122236] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 582.153203] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 582.153458] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 582.153618] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.153799] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e 
tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 582.153947] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.154591] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 582.154854] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 582.155079] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 582.155210] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 582.155378] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 582.155552] env[61855]: DEBUG nova.virt.hardware [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 582.157034] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251eac00-44a2-49bc-b277-263f0999037d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.169637] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b239f2c-0bfc-46c5-8cb6-507f6f1292cd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.524246] env[61855]: DEBUG nova.network.neutron [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Updated VIF entry in instance network info cache for port a93f63f8-15f8-4a11-b810-93697ef84d11. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 582.525561] env[61855]: DEBUG nova.network.neutron [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Updating instance_info_cache with network_info: [{"id": "a93f63f8-15f8-4a11-b810-93697ef84d11", "address": "fa:16:3e:71:1b:20", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa93f63f8-15", "ovs_interfaceid": "a93f63f8-15f8-4a11-b810-93697ef84d11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.539386] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Releasing lock "refresh_cache-d0f9134e-2797-4265-86d8-d68b5d3beb7a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.540444] env[61855]: DEBUG nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Received event network-vif-plugged-adb272c1-e20b-4623-a13f-12aa4eafa8c2 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 582.541913] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Acquiring lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.541913] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.541913] env[61855]: DEBUG oslo_concurrency.lockutils [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.541913] env[61855]: DEBUG nova.compute.manager 
[req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] No waiting events found dispatching network-vif-plugged-adb272c1-e20b-4623-a13f-12aa4eafa8c2 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 582.542050] env[61855]: WARNING nova.compute.manager [req-995316e5-0df5-4316-b4c2-6f05b6994ec9 req-e813d2e1-5700-4338-a21f-d2e420b8084f service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Received unexpected event network-vif-plugged-adb272c1-e20b-4623-a13f-12aa4eafa8c2 for instance with vm_state building and task_state spawning. [ 582.656232] env[61855]: DEBUG nova.policy [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4c87d826bb4e441aa5867618dc1b6fe0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bd98747e4cd4ee895df45a226cf4fd6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 583.517019] env[61855]: DEBUG nova.compute.manager [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Received event network-changed-adb272c1-e20b-4623-a13f-12aa4eafa8c2 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 583.518281] env[61855]: DEBUG nova.compute.manager [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Refreshing instance network info cache due to event network-changed-adb272c1-e20b-4623-a13f-12aa4eafa8c2. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 583.518550] env[61855]: DEBUG oslo_concurrency.lockutils [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] Acquiring lock "refresh_cache-fa375b5c-bf96-4f57-a97c-73ef45bbef75" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.518694] env[61855]: DEBUG oslo_concurrency.lockutils [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] Acquired lock "refresh_cache-fa375b5c-bf96-4f57-a97c-73ef45bbef75" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.518854] env[61855]: DEBUG nova.network.neutron [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Refreshing network info cache for port adb272c1-e20b-4623-a13f-12aa4eafa8c2 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 584.047432] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Successfully updated port: bcd260eb-75dd-410f-be40-26e5ce9b8fa1 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 584.064293] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "refresh_cache-e21722cc-672b-4f8a-9f78-e50ac83071a5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.064818] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired lock "refresh_cache-e21722cc-672b-4f8a-9f78-e50ac83071a5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.064818] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 584.170402] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.362129] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Successfully updated port: cf6f417f-7674-4783-a3f1-84a524aeda44 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 584.368542] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "refresh_cache-4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.368854] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired lock "refresh_cache-4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.369444] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 584.564899] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Updating instance_info_cache with network_info: [{"id": "bcd260eb-75dd-410f-be40-26e5ce9b8fa1", "address": "fa:16:3e:5e:78:55", "network": {"id": "baa96ebe-023e-4452-a5e7-1d5dfd6dd00c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1049479949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed74679ad9b8430e8e1591ef5e75b898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcd260eb-75", "ovs_interfaceid": "bcd260eb-75dd-410f-be40-26e5ce9b8fa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.594023] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Releasing lock "refresh_cache-e21722cc-672b-4f8a-9f78-e50ac83071a5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.594023] 
env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Instance network_info: |[{"id": "bcd260eb-75dd-410f-be40-26e5ce9b8fa1", "address": "fa:16:3e:5e:78:55", "network": {"id": "baa96ebe-023e-4452-a5e7-1d5dfd6dd00c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1049479949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed74679ad9b8430e8e1591ef5e75b898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcd260eb-75", "ovs_interfaceid": "bcd260eb-75dd-410f-be40-26e5ce9b8fa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 584.594544] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:78:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcd260eb-75dd-410f-be40-26e5ce9b8fa1', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 584.607594] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating folder: Project (ed74679ad9b8430e8e1591ef5e75b898). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 584.608640] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.611236] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6621caf6-cf73-4e1a-8da4-f055e36caf35 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.619386] env[61855]: DEBUG nova.compute.manager [req-96b081e0-b61b-42d7-b0c6-1046d57dfd78 req-d7c429cf-9023-42e2-86b6-6bdc13326e63 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Received event network-vif-plugged-cf6f417f-7674-4783-a3f1-84a524aeda44 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 584.619698] env[61855]: DEBUG oslo_concurrency.lockutils [req-96b081e0-b61b-42d7-b0c6-1046d57dfd78 req-d7c429cf-9023-42e2-86b6-6bdc13326e63 service nova] Acquiring lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.619826] env[61855]: DEBUG oslo_concurrency.lockutils [req-96b081e0-b61b-42d7-b0c6-1046d57dfd78 req-d7c429cf-9023-42e2-86b6-6bdc13326e63 service nova] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.620073] env[61855]: DEBUG oslo_concurrency.lockutils [req-96b081e0-b61b-42d7-b0c6-1046d57dfd78 req-d7c429cf-9023-42e2-86b6-6bdc13326e63 service nova] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.620144] env[61855]: DEBUG nova.compute.manager [req-96b081e0-b61b-42d7-b0c6-1046d57dfd78 req-d7c429cf-9023-42e2-86b6-6bdc13326e63 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] No waiting events found dispatching network-vif-plugged-cf6f417f-7674-4783-a3f1-84a524aeda44 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 584.620324] env[61855]: WARNING nova.compute.manager [req-96b081e0-b61b-42d7-b0c6-1046d57dfd78 req-d7c429cf-9023-42e2-86b6-6bdc13326e63 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Received unexpected event network-vif-plugged-cf6f417f-7674-4783-a3f1-84a524aeda44 for instance with vm_state building and task_state spawning. [ 584.623855] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Created folder: Project (ed74679ad9b8430e8e1591ef5e75b898) in parent group-v847048. [ 584.624081] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating folder: Instances. Parent ref: group-v847067. 
{{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 584.624552] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87601a07-dc92-439e-b922-87af7a1427be {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.634723] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Created folder: Instances in parent group-v847067. [ 584.635780] env[61855]: DEBUG oslo.service.loopingcall [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.635780] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 584.635780] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13ece858-d34c-402e-b209-cfed2661620a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.659857] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 584.659857] env[61855]: value = "task-4302787" [ 584.659857] env[61855]: _type = "Task" [ 584.659857] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.673114] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302787, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.171300] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302787, 'name': CreateVM_Task, 'duration_secs': 0.345754} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.171493] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 585.172459] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.172644] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.172959] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 585.173224] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27c5482e-9366-4ff7-ab28-844fe9bff74e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.178420] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){ [ 585.178420] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b2d49f-e6a9-f6af-a86e-ecf3fa054b2f" [ 585.178420] env[61855]: _type = "Task" [ 585.178420] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.187817] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b2d49f-e6a9-f6af-a86e-ecf3fa054b2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.458417] env[61855]: DEBUG nova.compute.manager [req-3a5db807-004d-4dc3-abc2-b83116c0c639 req-1ebf5500-d8a1-44f0-8a0f-be171c1aa499 service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Received event network-vif-plugged-bcd260eb-75dd-410f-be40-26e5ce9b8fa1 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 585.458642] env[61855]: DEBUG oslo_concurrency.lockutils [req-3a5db807-004d-4dc3-abc2-b83116c0c639 req-1ebf5500-d8a1-44f0-8a0f-be171c1aa499 service nova] Acquiring lock "e21722cc-672b-4f8a-9f78-e50ac83071a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.458847] env[61855]: DEBUG oslo_concurrency.lockutils [req-3a5db807-004d-4dc3-abc2-b83116c0c639 req-1ebf5500-d8a1-44f0-8a0f-be171c1aa499 service nova] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.459029] env[61855]: DEBUG oslo_concurrency.lockutils [req-3a5db807-004d-4dc3-abc2-b83116c0c639 req-1ebf5500-d8a1-44f0-8a0f-be171c1aa499 service nova] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.459211] env[61855]: DEBUG nova.compute.manager [req-3a5db807-004d-4dc3-abc2-b83116c0c639 req-1ebf5500-d8a1-44f0-8a0f-be171c1aa499 service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] No waiting events found dispatching network-vif-plugged-bcd260eb-75dd-410f-be40-26e5ce9b8fa1 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 585.461661] env[61855]: WARNING nova.compute.manager [req-3a5db807-004d-4dc3-abc2-b83116c0c639 req-1ebf5500-d8a1-44f0-8a0f-be171c1aa499 service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Received unexpected event network-vif-plugged-bcd260eb-75dd-410f-be40-26e5ce9b8fa1 for instance with vm_state building and task_state spawning. [ 585.472794] env[61855]: DEBUG nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Successfully created port: 341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 585.596132] env[61855]: DEBUG nova.network.neutron [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Updated VIF entry in instance network info cache for port adb272c1-e20b-4623-a13f-12aa4eafa8c2. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 585.596132] env[61855]: DEBUG nova.network.neutron [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Updating instance_info_cache with network_info: [{"id": "adb272c1-e20b-4623-a13f-12aa4eafa8c2", "address": "fa:16:3e:7d:11:22", "network": {"id": "08651dec-a87e-414b-b005-555bef8a06b8", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1003918394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2877582250f24f809aa7dadbc453b849", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb272c1-e2", "ovs_interfaceid": "adb272c1-e20b-4623-a13f-12aa4eafa8c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.608500] env[61855]: DEBUG oslo_concurrency.lockutils [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] Releasing lock "refresh_cache-fa375b5c-bf96-4f57-a97c-73ef45bbef75" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.609253] env[61855]: DEBUG nova.compute.manager [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Received event network-changed-e74a738b-e127-4742-bb96-e83698e75141 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 585.609253] env[61855]: DEBUG nova.compute.manager [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Refreshing instance network info cache due to event network-changed-e74a738b-e127-4742-bb96-e83698e75141. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 585.609333] env[61855]: DEBUG oslo_concurrency.lockutils [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] Acquiring lock "refresh_cache-6e0adc99-63ad-4cca-b300-d67dc2928324" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.611347] env[61855]: DEBUG oslo_concurrency.lockutils [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] Acquired lock "refresh_cache-6e0adc99-63ad-4cca-b300-d67dc2928324" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.611347] env[61855]: DEBUG nova.network.neutron [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Refreshing network info cache for port e74a738b-e127-4742-bb96-e83698e75141 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 585.690078] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.690618] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 585.690878] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.823695] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Updating instance_info_cache with network_info: [{"id": "cf6f417f-7674-4783-a3f1-84a524aeda44", "address": "fa:16:3e:c6:05:70", "network": {"id": "baa96ebe-023e-4452-a5e7-1d5dfd6dd00c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1049479949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed74679ad9b8430e8e1591ef5e75b898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapcf6f417f-76", "ovs_interfaceid": "cf6f417f-7674-4783-a3f1-84a524aeda44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.843892] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Releasing lock "refresh_cache-4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.846020] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Instance network_info: |[{"id": "cf6f417f-7674-4783-a3f1-84a524aeda44", "address": "fa:16:3e:c6:05:70", "network": {"id": "baa96ebe-023e-4452-a5e7-1d5dfd6dd00c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1049479949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed74679ad9b8430e8e1591ef5e75b898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf6f417f-76", "ovs_interfaceid": "cf6f417f-7674-4783-a3f1-84a524aeda44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 585.846154] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:05:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf6f417f-7674-4783-a3f1-84a524aeda44', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.858049] env[61855]: DEBUG oslo.service.loopingcall [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.859709] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 585.859709] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23a848d3-7960-4188-a428-0c4e089f38f7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.884471] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.884471] env[61855]: value = "task-4302788" [ 585.884471] env[61855]: _type = "Task" [ 585.884471] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.897718] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302788, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.396422] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302788, 'name': CreateVM_Task, 'duration_secs': 0.323054} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.397037] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 586.397769] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.397954] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.398491] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 586.398596] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1239775a-c7b0-4eab-8580-84d83ba3caf5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.408984] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){ [ 586.408984] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52736046-0494-0d0b-071b-a4064ff907a5" [ 586.408984] env[61855]: _type = "Task" [ 586.408984] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.417427] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52736046-0494-0d0b-071b-a4064ff907a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.928564] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.928564] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.928564] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.196343] env[61855]: DEBUG nova.network.neutron [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Updated VIF entry in instance network info cache for port e74a738b-e127-4742-bb96-e83698e75141. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 587.196726] env[61855]: DEBUG nova.network.neutron [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Updating instance_info_cache with network_info: [{"id": "e74a738b-e127-4742-bb96-e83698e75141", "address": "fa:16:3e:b6:79:d7", "network": {"id": "ef381f85-81bb-4282-9bbe-236cd54bb0be", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2095601959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8570e4a78e8d41f4837f4965b1bb891b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape74a738b-e1", "ovs_interfaceid": "e74a738b-e127-4742-bb96-e83698e75141", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.209898] env[61855]: DEBUG oslo_concurrency.lockutils [req-ce6fdc21-c86d-4fe5-872e-365a2c029d88 req-0ec2cc53-d54b-4eed-835c-2ce13f2bc5f6 service nova] Releasing lock "refresh_cache-6e0adc99-63ad-4cca-b300-d67dc2928324" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.655295] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "6c15201d-7373-4040-9256-84ff11fcfed2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.655572] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "6c15201d-7373-4040-9256-84ff11fcfed2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.676894] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 587.758285] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.758687] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.760975] env[61855]: INFO nova.compute.claims [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.063026] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ff7223-4728-42bc-9423-6669643ded04 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.072390] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78536a33-fe0e-451e-854b-061bcc9b75e4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.107837] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb98471-a01e-4583-99bd-bbe4d3f30a97 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.115596] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425f44a3-77ee-4754-bfe0-85c34f1ae305 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.132055] env[61855]: DEBUG nova.compute.provider_tree [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.144170] env[61855]: DEBUG nova.scheduler.client.report [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
588.162402] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.404s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.162902] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 588.228243] env[61855]: DEBUG nova.compute.utils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.229786] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 588.230376] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 588.249905] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 588.351974] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 588.381544] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 588.383052] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 588.383052] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.383052] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 588.383052] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.383052] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 588.383332] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 588.383606] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 588.383891] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 588.384282] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 588.384588] env[61855]: DEBUG nova.virt.hardware [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.387020] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8be3bc-f726-4f7f-a11b-64bb32c23b9c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.396339] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb4d82f-cc35-42c7-b0fc-c2d17a00bc43 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.414067] env[61855]: DEBUG nova.policy [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '195c607642b2429a917b0368bcafd4f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e30d447d38ad436995057ca02bd69839', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 588.743047] env[61855]: DEBUG nova.compute.manager [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Received event network-changed-cf6f417f-7674-4783-a3f1-84a524aeda44 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 588.743254] env[61855]: DEBUG nova.compute.manager [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Refreshing instance network info cache due to event network-changed-cf6f417f-7674-4783-a3f1-84a524aeda44. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 588.743473] env[61855]: DEBUG oslo_concurrency.lockutils [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] Acquiring lock "refresh_cache-4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.743621] env[61855]: DEBUG oslo_concurrency.lockutils [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] Acquired lock "refresh_cache-4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.743786] env[61855]: DEBUG nova.network.neutron [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Refreshing network info cache for port cf6f417f-7674-4783-a3f1-84a524aeda44 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 589.533170] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.533510] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.752190] env[61855]: DEBUG nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Successfully updated port: 341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 589.764575] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "refresh_cache-311d492c-0093-4d64-a56f-80fce95b809a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.764695] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquired lock "refresh_cache-311d492c-0093-4d64-a56f-80fce95b809a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.764844] env[61855]: DEBUG nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 589.955738] env[61855]: DEBUG 
nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 590.539392] env[61855]: DEBUG nova.network.neutron [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Updated VIF entry in instance network info cache for port cf6f417f-7674-4783-a3f1-84a524aeda44. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 590.541216] env[61855]: DEBUG nova.network.neutron [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Updating instance_info_cache with network_info: [{"id": "cf6f417f-7674-4783-a3f1-84a524aeda44", "address": "fa:16:3e:c6:05:70", "network": {"id": "baa96ebe-023e-4452-a5e7-1d5dfd6dd00c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1049479949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed74679ad9b8430e8e1591ef5e75b898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf6f417f-76", "ovs_interfaceid": "cf6f417f-7674-4783-a3f1-84a524aeda44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.561084] env[61855]: DEBUG oslo_concurrency.lockutils [req-74f13789-9639-4c93-bcba-6dc463c2ee5a req-23fd6a96-f1c3-4425-b6b7-4c83eb95c691 service nova] Releasing lock "refresh_cache-4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.638635] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Successfully created port: 89ecbf49-20e9-4cd8-b665-c0688ca796fd {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 590.782180] env[61855]: DEBUG nova.compute.manager [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Received event network-changed-bcd260eb-75dd-410f-be40-26e5ce9b8fa1 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 590.782355] env[61855]: DEBUG nova.compute.manager [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Refreshing instance network info 
cache due to event network-changed-bcd260eb-75dd-410f-be40-26e5ce9b8fa1. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 590.782568] env[61855]: DEBUG oslo_concurrency.lockutils [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] Acquiring lock "refresh_cache-e21722cc-672b-4f8a-9f78-e50ac83071a5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.782708] env[61855]: DEBUG oslo_concurrency.lockutils [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] Acquired lock "refresh_cache-e21722cc-672b-4f8a-9f78-e50ac83071a5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.782869] env[61855]: DEBUG nova.network.neutron [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Refreshing network info cache for port bcd260eb-75dd-410f-be40-26e5ce9b8fa1 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 591.144573] env[61855]: DEBUG nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Updating instance_info_cache with network_info: [{"id": "341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e", "address": "fa:16:3e:10:d0:62", "network": {"id": "556c97d8-8c96-4907-8322-d2840626f9a2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1309377829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bd98747e4cd4ee895df45a226cf4fd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap341e7e00-bd", "ovs_interfaceid": "341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.167745] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Releasing lock "refresh_cache-311d492c-0093-4d64-a56f-80fce95b809a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.168109] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Instance network_info: |[{"id": "341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e", "address": "fa:16:3e:10:d0:62", "network": {"id": "556c97d8-8c96-4907-8322-d2840626f9a2", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-1309377829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bd98747e4cd4ee895df45a226cf4fd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap341e7e00-bd", "ovs_interfaceid": "341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 591.168584] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:d0:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b56036cd-97ac-47f5-9089-7b38bfe99228', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 591.179639] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Creating folder: Project (0bd98747e4cd4ee895df45a226cf4fd6). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 591.180306] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0fe420c5-7a25-4e05-a992-c186505d325e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.192708] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Created folder: Project (0bd98747e4cd4ee895df45a226cf4fd6) in parent group-v847048. [ 591.193823] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Creating folder: Instances. Parent ref: group-v847071. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 591.193823] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bd566a3-dd2b-4feb-94e5-866e45620888 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.209783] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Created folder: Instances in parent group-v847071. 
[ 591.210065] env[61855]: DEBUG oslo.service.loopingcall [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 591.210260] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 591.210465] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45078193-26f0-4703-a60f-9a3dc736bef0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.232976] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.232976] env[61855]: value = "task-4302791" [ 591.232976] env[61855]: _type = "Task" [ 591.232976] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.243470] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302791, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.276589] env[61855]: DEBUG nova.compute.manager [req-9f797091-cfb5-449d-b536-4159475391b6 req-f6e3b364-8d3e-4ddc-b8ce-309dbb07375f service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Received event network-vif-plugged-341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 591.276824] env[61855]: DEBUG oslo_concurrency.lockutils [req-9f797091-cfb5-449d-b536-4159475391b6 req-f6e3b364-8d3e-4ddc-b8ce-309dbb07375f service nova] Acquiring lock "311d492c-0093-4d64-a56f-80fce95b809a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.277057] env[61855]: DEBUG oslo_concurrency.lockutils [req-9f797091-cfb5-449d-b536-4159475391b6 req-f6e3b364-8d3e-4ddc-b8ce-309dbb07375f service nova] Lock "311d492c-0093-4d64-a56f-80fce95b809a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.277255] env[61855]: DEBUG oslo_concurrency.lockutils [req-9f797091-cfb5-449d-b536-4159475391b6 req-f6e3b364-8d3e-4ddc-b8ce-309dbb07375f service nova] Lock "311d492c-0093-4d64-a56f-80fce95b809a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.277448] env[61855]: DEBUG nova.compute.manager [req-9f797091-cfb5-449d-b536-4159475391b6 req-f6e3b364-8d3e-4ddc-b8ce-309dbb07375f service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] No waiting events found dispatching network-vif-plugged-341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 591.277713] env[61855]: WARNING nova.compute.manager [req-9f797091-cfb5-449d-b536-4159475391b6 req-f6e3b364-8d3e-4ddc-b8ce-309dbb07375f service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Received unexpected event 
network-vif-plugged-341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e for instance with vm_state building and task_state spawning. [ 591.746312] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302791, 'name': CreateVM_Task, 'duration_secs': 0.349431} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.746601] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 591.747254] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.747467] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.747810] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 591.748134] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27523cad-b03d-4af5-835f-ec9ee69d357e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.753978] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Waiting for the task: (returnval){ [ 591.753978] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]520c6395-e634-1246-1bdf-374b7f6319b6" [ 591.753978] env[61855]: _type = "Task" [ 591.753978] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.764991] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]520c6395-e634-1246-1bdf-374b7f6319b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.088793] env[61855]: DEBUG nova.network.neutron [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Updated VIF entry in instance network info cache for port bcd260eb-75dd-410f-be40-26e5ce9b8fa1. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 592.089284] env[61855]: DEBUG nova.network.neutron [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Updating instance_info_cache with network_info: [{"id": "bcd260eb-75dd-410f-be40-26e5ce9b8fa1", "address": "fa:16:3e:5e:78:55", "network": {"id": "baa96ebe-023e-4452-a5e7-1d5dfd6dd00c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1049479949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed74679ad9b8430e8e1591ef5e75b898", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcd260eb-75", "ovs_interfaceid": "bcd260eb-75dd-410f-be40-26e5ce9b8fa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.099241] env[61855]: DEBUG oslo_concurrency.lockutils [req-2ab7062e-326a-4902-8ebf-c63c40555de1 req-a71a4e44-9d45-4ccd-b4cf-35cda75ecf5c service nova] Releasing lock "refresh_cache-e21722cc-672b-4f8a-9f78-e50ac83071a5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.267012] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.267378] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.267742] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.390213] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Successfully updated port: 89ecbf49-20e9-4cd8-b665-c0688ca796fd {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 593.400267] 
env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "refresh_cache-6c15201d-7373-4040-9256-84ff11fcfed2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.400421] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquired lock "refresh_cache-6c15201d-7373-4040-9256-84ff11fcfed2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.400576] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 593.502289] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.658151] env[61855]: DEBUG nova.compute.manager [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Received event network-changed-341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 593.658437] env[61855]: DEBUG nova.compute.manager [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Refreshing instance network info cache due to event network-changed-341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 593.659223] env[61855]: DEBUG oslo_concurrency.lockutils [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] Acquiring lock "refresh_cache-311d492c-0093-4d64-a56f-80fce95b809a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.659451] env[61855]: DEBUG oslo_concurrency.lockutils [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] Acquired lock "refresh_cache-311d492c-0093-4d64-a56f-80fce95b809a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.659868] env[61855]: DEBUG nova.network.neutron [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Refreshing network info cache for port 341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 594.089307] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Updating instance_info_cache with network_info: [{"id": "89ecbf49-20e9-4cd8-b665-c0688ca796fd", "address": "fa:16:3e:cc:ef:58", "network": {"id": "efd1232d-6a19-4b1e-96b2-f502178ceb5e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2008533716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e30d447d38ad436995057ca02bd69839", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ecbf49-20", "ovs_interfaceid": "89ecbf49-20e9-4cd8-b665-c0688ca796fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.132879] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Releasing lock "refresh_cache-6c15201d-7373-4040-9256-84ff11fcfed2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.133549] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Instance network_info: |[{"id": "89ecbf49-20e9-4cd8-b665-c0688ca796fd", "address": "fa:16:3e:cc:ef:58", "network": {"id": "efd1232d-6a19-4b1e-96b2-f502178ceb5e", "bridge": "br-int", "label": 
"tempest-FloatingIPsAssociationTestJSON-2008533716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e30d447d38ad436995057ca02bd69839", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ecbf49-20", "ovs_interfaceid": "89ecbf49-20e9-4cd8-b665-c0688ca796fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 594.134105] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:ef:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89ecbf49-20e9-4cd8-b665-c0688ca796fd', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 594.152381] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Creating folder: Project (e30d447d38ad436995057ca02bd69839). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.155115] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47a1da3a-b99f-4287-b93c-07a71f506d2a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.167525] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Created folder: Project (e30d447d38ad436995057ca02bd69839) in parent group-v847048. [ 594.167832] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Creating folder: Instances. Parent ref: group-v847074. 
{{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.168144] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37852f2e-65e2-4a3c-9d74-b36605fb79ad {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.181596] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Created folder: Instances in parent group-v847074. [ 594.181857] env[61855]: DEBUG oslo.service.loopingcall [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.182893] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 594.184018] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbdb64da-04a9-4f15-b124-d51f3d56fd90 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.215137] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 594.215137] env[61855]: value = "task-4302794" [ 594.215137] env[61855]: _type = "Task" [ 594.215137] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.225488] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302794, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.660238] env[61855]: DEBUG nova.network.neutron [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Updated VIF entry in instance network info cache for port 341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 594.660593] env[61855]: DEBUG nova.network.neutron [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Updating instance_info_cache with network_info: [{"id": "341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e", "address": "fa:16:3e:10:d0:62", "network": {"id": "556c97d8-8c96-4907-8322-d2840626f9a2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1309377829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bd98747e4cd4ee895df45a226cf4fd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap341e7e00-bd", "ovs_interfaceid": "341e7e00-bd9f-4fe3-bdf6-9889ebe32c7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.680671] env[61855]: DEBUG oslo_concurrency.lockutils [req-2033f610-a4a7-4aea-b78d-65f1f6689193 req-6b9ce1e3-f023-487d-aa8f-8a2b0fe5ef70 service nova] Releasing lock "refresh_cache-311d492c-0093-4d64-a56f-80fce95b809a" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.727084] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302794, 'name': CreateVM_Task, 'duration_secs': 0.33322} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.727329] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 594.728395] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.730028] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.730028] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 594.730028] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54880db9-d619-4f5d-a928-346b63cfd2a5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.738270] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Waiting for the task: (returnval){ [ 594.738270] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]523660cb-81d5-9258-846b-75f55ae31234" [ 594.738270] env[61855]: _type = "Task" [ 594.738270] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.748024] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]523660cb-81d5-9258-846b-75f55ae31234, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.832121] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "3ae180bd-526d-481f-958b-ca3af96b4406" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.832443] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "3ae180bd-526d-481f-958b-ca3af96b4406" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.250309] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.250589] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 595.250871] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.860427] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.860427] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.947384] env[61855]: DEBUG nova.compute.manager [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Received event 
network-vif-plugged-89ecbf49-20e9-4cd8-b665-c0688ca796fd {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 596.947646] env[61855]: DEBUG oslo_concurrency.lockutils [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] Acquiring lock "6c15201d-7373-4040-9256-84ff11fcfed2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.947881] env[61855]: DEBUG oslo_concurrency.lockutils [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] Lock "6c15201d-7373-4040-9256-84ff11fcfed2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.948423] env[61855]: DEBUG oslo_concurrency.lockutils [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] Lock "6c15201d-7373-4040-9256-84ff11fcfed2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.948423] env[61855]: DEBUG nova.compute.manager [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] No waiting events found dispatching network-vif-plugged-89ecbf49-20e9-4cd8-b665-c0688ca796fd {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 596.948561] env[61855]: WARNING nova.compute.manager [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Received unexpected event network-vif-plugged-89ecbf49-20e9-4cd8-b665-c0688ca796fd for instance with vm_state building and task_state spawning. [ 596.948613] env[61855]: DEBUG nova.compute.manager [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Received event network-changed-89ecbf49-20e9-4cd8-b665-c0688ca796fd {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 596.948772] env[61855]: DEBUG nova.compute.manager [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Refreshing instance network info cache due to event network-changed-89ecbf49-20e9-4cd8-b665-c0688ca796fd. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 596.949363] env[61855]: DEBUG oslo_concurrency.lockutils [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] Acquiring lock "refresh_cache-6c15201d-7373-4040-9256-84ff11fcfed2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.949363] env[61855]: DEBUG oslo_concurrency.lockutils [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] Acquired lock "refresh_cache-6c15201d-7373-4040-9256-84ff11fcfed2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.949363] env[61855]: DEBUG nova.network.neutron [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Refreshing network info cache for port 89ecbf49-20e9-4cd8-b665-c0688ca796fd {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 597.294231] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.294231] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.406965] env[61855]: DEBUG nova.network.neutron [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Updated VIF entry in instance network info cache for port 89ecbf49-20e9-4cd8-b665-c0688ca796fd. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 597.407339] env[61855]: DEBUG nova.network.neutron [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Updating instance_info_cache with network_info: [{"id": "89ecbf49-20e9-4cd8-b665-c0688ca796fd", "address": "fa:16:3e:cc:ef:58", "network": {"id": "efd1232d-6a19-4b1e-96b2-f502178ceb5e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2008533716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e30d447d38ad436995057ca02bd69839", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89ecbf49-20", "ovs_interfaceid": "89ecbf49-20e9-4cd8-b665-c0688ca796fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.419719] env[61855]: DEBUG oslo_concurrency.lockutils [req-a91c3e40-5256-4f43-bd58-2693c0915c6c req-3d40a9bd-56ee-48a5-85bb-edd7146ff5b7 service nova] Releasing lock "refresh_cache-6c15201d-7373-4040-9256-84ff11fcfed2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.913887] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f5fe65f-3452-4b2c-a43d-27f5ec5af611 tempest-VolumesAssistedSnapshotsTest-842109476 tempest-VolumesAssistedSnapshotsTest-842109476-project-member] Acquiring lock "b979c065-ea7c-43bc-8701-fb77b4945ddf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.914232] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f5fe65f-3452-4b2c-a43d-27f5ec5af611 tempest-VolumesAssistedSnapshotsTest-842109476 tempest-VolumesAssistedSnapshotsTest-842109476-project-member] Lock "b979c065-ea7c-43bc-8701-fb77b4945ddf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.761660] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c9fc344b-a76a-4405-9606-12c92092ccd0 tempest-AttachInterfacesUnderV243Test-1665714490 tempest-AttachInterfacesUnderV243Test-1665714490-project-member] Acquiring lock "f836f443-77b5-41ae-a1c2-1ee5f80885fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.762025] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c9fc344b-a76a-4405-9606-12c92092ccd0 tempest-AttachInterfacesUnderV243Test-1665714490 
tempest-AttachInterfacesUnderV243Test-1665714490-project-member] Lock "f836f443-77b5-41ae-a1c2-1ee5f80885fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.349183] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c255994d-1c8d-4a3b-aba6-efa461909568 tempest-ServersWithSpecificFlavorTestJSON-615293268 tempest-ServersWithSpecificFlavorTestJSON-615293268-project-member] Acquiring lock "d61ac621-8140-4a40-8e00-acc041e3e0b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.349501] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c255994d-1c8d-4a3b-aba6-efa461909568 tempest-ServersWithSpecificFlavorTestJSON-615293268 tempest-ServersWithSpecificFlavorTestJSON-615293268-project-member] Lock "d61ac621-8140-4a40-8e00-acc041e3e0b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.710704] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c80253ba-89ea-4c2e-867e-3c7684985db6 tempest-ServerAddressesNegativeTestJSON-547679814 tempest-ServerAddressesNegativeTestJSON-547679814-project-member] Acquiring lock "6e14a79e-f3fa-47f5-afff-8d159c6d8a88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.712559] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c80253ba-89ea-4c2e-867e-3c7684985db6 tempest-ServerAddressesNegativeTestJSON-547679814 tempest-ServerAddressesNegativeTestJSON-547679814-project-member] Lock "6e14a79e-f3fa-47f5-afff-8d159c6d8a88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.729514] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bb2fc590-8b72-4587-8cfa-5d4361c789b9 tempest-ServersNegativeTestJSON-1790298136 tempest-ServersNegativeTestJSON-1790298136-project-member] Acquiring lock "e454739d-dd42-4cdb-90bb-2bf733688af7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.729795] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bb2fc590-8b72-4587-8cfa-5d4361c789b9 tempest-ServersNegativeTestJSON-1790298136 tempest-ServersNegativeTestJSON-1790298136-project-member] Lock "e454739d-dd42-4cdb-90bb-2bf733688af7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.449657] env[61855]: DEBUG oslo_concurrency.lockutils [None req-64796851-1525-4f2a-a2ba-317b214ca030 tempest-ServerExternalEventsTest-1224379416 tempest-ServerExternalEventsTest-1224379416-project-member] Acquiring lock "83531c89-23dd-47b8-82dd-f8ec7b95dd4d" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.449657] env[61855]: DEBUG oslo_concurrency.lockutils [None req-64796851-1525-4f2a-a2ba-317b214ca030 tempest-ServerExternalEventsTest-1224379416 tempest-ServerExternalEventsTest-1224379416-project-member] Lock "83531c89-23dd-47b8-82dd-f8ec7b95dd4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.363137] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de874d95-09fc-456b-96a1-d61fc7389294 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "e3a48262-e571-4b12-8f5f-5c8f5f65e5eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.363430] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de874d95-09fc-456b-96a1-d61fc7389294 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "e3a48262-e571-4b12-8f5f-5c8f5f65e5eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.774408] env[61855]: WARNING oslo_vmware.rw_handles [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 616.774408] env[61855]: ERROR oslo_vmware.rw_handles [ 616.778026] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk 
on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 616.778026] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 616.778026] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Copying Virtual Disk [datastore2] vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/013cb9bd-6260-4d86-9aed-252e7035d03c/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 616.779488] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af17b345-2037-46d7-9cfc-97115877171f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.792373] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Waiting for the task: (returnval){ [ 616.792373] env[61855]: value = "task-4302795" [ 616.792373] env[61855]: _type = "Task" [ 616.792373] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.802392] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Task: {'id': task-4302795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.310955] env[61855]: DEBUG oslo_vmware.exceptions [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 617.311106] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.314526] env[61855]: ERROR nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 617.314526] env[61855]: Faults: ['InvalidArgument'] [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Traceback (most recent call last): [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] yield resources [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self.driver.spawn(context, instance, image_meta, [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self._fetch_image_if_missing(context, vi) [ 617.314526] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] image_cache(vi, tmp_image_ds_loc) [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] vm_util.copy_virtual_disk( [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] session._wait_for_task(vmdk_copy_task) [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] return self.wait_for_task(task_ref) [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] return evt.wait() [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] result = hub.switch() [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.317323] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] return self.greenlet.switch() [ 617.318597] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 617.318597] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self.f(*self.args, **self.kw) [ 617.318597] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 617.318597] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] raise exceptions.translate_fault(task_info.error) [ 617.318597] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 617.318597] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Faults: ['InvalidArgument'] [ 617.318597] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] [ 617.318597] env[61855]: INFO nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Terminating instance [ 617.318597] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.318897] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 617.318897] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquiring lock 
"refresh_cache-7b033766-5b47-417a-9d90-35b5657ceaef" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.318897] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquired lock "refresh_cache-7b033766-5b47-417a-9d90-35b5657ceaef" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.318897] env[61855]: DEBUG nova.network.neutron [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.318897] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7689df02-fbf5-4495-8c01-e8b19280fcd6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.328477] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 617.328686] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 617.330575] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81834e81-cd92-4f58-8dd3-5b760f7d4ddf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.342955] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Waiting for the task: (returnval){ [ 617.342955] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5244b63d-1342-979c-2b63-15d3687529be" [ 617.342955] env[61855]: _type = "Task" [ 617.342955] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.351452] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5244b63d-1342-979c-2b63-15d3687529be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.372633] env[61855]: DEBUG nova.network.neutron [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.574355] env[61855]: DEBUG nova.network.neutron [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.586291] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Releasing lock "refresh_cache-7b033766-5b47-417a-9d90-35b5657ceaef" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.586804] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 617.586889] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 617.588050] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d1adfa-eeff-4f9b-80bf-7c2713c20121 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.601431] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 617.601431] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79994245-dd81-496c-bf86-84690fb21de4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.641935] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 617.641935] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 617.645927] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Deleting the datastore file [datastore2] 7b033766-5b47-417a-9d90-35b5657ceaef {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
617.646601] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-801e7591-4ce0-46c4-be6f-576ae4f3ec93 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.653772] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Waiting for the task: (returnval){ [ 617.653772] env[61855]: value = "task-4302797" [ 617.653772] env[61855]: _type = "Task" [ 617.653772] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.666999] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Task: {'id': task-4302797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.856404] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 617.856728] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Creating directory with path [datastore2] vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 617.856918] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71bedd42-bdb1-4579-9970-da84e5275f29 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.869070] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Created directory with path [datastore2] vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 617.869458] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Fetch image to [datastore2] vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 617.869458] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 
617.870309] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0c5d77-b744-4500-80a7-f99eb2c87ed4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.879067] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62619835-d3ad-4184-9ca6-238f429cf83a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.899038] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d3fd5b-d5a1-41cb-9c8d-f1e1e1c5ef35 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.938011] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de34e11d-d65a-4d40-9d5d-8c20751b92cb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.944854] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-649757f9-6d75-4f81-8f65-548c3ad892b1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.978023] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 618.054824] env[61855]: DEBUG oslo_vmware.rw_handles [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 618.127904] env[61855]: DEBUG oslo_vmware.rw_handles [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 618.128130] env[61855]: DEBUG oslo_vmware.rw_handles [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 618.166401] env[61855]: DEBUG oslo_vmware.api [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Task: {'id': task-4302797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039582} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.166658] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 618.166838] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 618.167807] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 618.170122] env[61855]: INFO nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Took 0.58 seconds to destroy the instance on the hypervisor. [ 618.170122] env[61855]: DEBUG oslo.service.loopingcall [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.170122] env[61855]: DEBUG nova.compute.manager [-] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 618.173221] env[61855]: DEBUG nova.compute.claims [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 618.173411] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.173529] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.676456] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577cdc30-755b-4019-b0f3-cef90e6754fd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.688228] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86205da-0f1c-48b7-bd43-1060dcf1bcb6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.725455] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019d082c-9e48-40f8-a076-f9e1dcc08836 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.733428] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf0fcc0-7815-44d7-bd34-38308e685990 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.749796] env[61855]: DEBUG nova.compute.provider_tree [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.761763] env[61855]: DEBUG nova.scheduler.client.report [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.783629] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.610s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 618.784748] env[61855]: ERROR nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 618.784748] env[61855]: Faults: ['InvalidArgument']
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Traceback (most recent call last):
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self.driver.spawn(context, instance, image_meta,
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self._fetch_image_if_missing(context, vi)
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] image_cache(vi, tmp_image_ds_loc)
[ 618.784748] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] vm_util.copy_virtual_disk(
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] session._wait_for_task(vmdk_copy_task)
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] return self.wait_for_task(task_ref)
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] return evt.wait()
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] result = hub.switch()
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] return self.greenlet.switch()
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 618.785227] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] self.f(*self.args, **self.kw)
[ 618.785687] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 618.785687] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] raise exceptions.translate_fault(task_info.error)
[ 618.785687] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 618.785687] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Faults: ['InvalidArgument']
[ 618.785687] env[61855]: ERROR nova.compute.manager [instance: 7b033766-5b47-417a-9d90-35b5657ceaef]
[ 618.785687] env[61855]: DEBUG nova.compute.utils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 618.790336] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Build of instance 7b033766-5b47-417a-9d90-35b5657ceaef was re-scheduled: A specified parameter was not correct: fileType
[ 618.790336] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 618.790742] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 618.790969] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Acquiring lock "refresh_cache-7b033766-5b47-417a-9d90-35b5657ceaef" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 618.791129] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723
tempest-ServersAdmin275Test-2075930723-project-member] Acquired lock "refresh_cache-7b033766-5b47-417a-9d90-35b5657ceaef" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.791370] env[61855]: DEBUG nova.network.neutron [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 618.851200] env[61855]: DEBUG nova.network.neutron [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.035353] env[61855]: DEBUG nova.network.neutron [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.048467] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Releasing lock "refresh_cache-7b033766-5b47-417a-9d90-35b5657ceaef" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.048986] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 619.048986] env[61855]: DEBUG nova.compute.manager [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] [instance: 7b033766-5b47-417a-9d90-35b5657ceaef] Skipping network deallocation for instance since networking was not requested. {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 619.184567] env[61855]: INFO nova.scheduler.client.report [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Deleted allocations for instance 7b033766-5b47-417a-9d90-35b5657ceaef [ 619.216448] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7d94f82e-9841-486a-96ec-9eab44e209ec tempest-ServersAdmin275Test-2075930723 tempest-ServersAdmin275Test-2075930723-project-member] Lock "7b033766-5b47-417a-9d90-35b5657ceaef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 53.950s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.245771] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Starting instance...
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 619.302649] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.302649] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.304854] env[61855]: INFO nova.compute.claims [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.733674] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75a1a7d-732a-4339-b041-3af26e592740 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.742019] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef879e31-b20f-4863-af35-dab683cf32c6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.773735] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bb265e-34af-4a2f-9357-5724bd713270 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.782391] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2bc459-e1d7-4767-ad85-35fdb80abb74 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.796497] env[61855]: DEBUG nova.compute.provider_tree [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.809144] env[61855]: DEBUG nova.scheduler.client.report [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.829109] env[61855]: DEBUG oslo_concurrency.lockutils 
[None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.526s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.829873] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 619.897668] env[61855]: DEBUG nova.compute.utils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.899114] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 619.899298] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 619.916758] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 620.017019] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 620.044635] env[61855]: DEBUG nova.policy [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6a68a34196c4d8c803bc56bbaf5c2a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e451677222f4beda25d0a45c2d91941', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 620.051480] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 620.052775] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 620.052775] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.052775] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 620.052970] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.053182] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 620.053458] 
env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 620.053856] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 620.054122] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 620.054349] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 620.054593] env[61855]: DEBUG nova.virt.hardware [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.055997] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad700fc-7fb3-4257-b56e-22235db9507e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.068015] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3596abe5-63bf-4319-b0ef-327009fd04ba {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.222226] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Successfully created port: 5c688834-4716-4449-9b44-59b1900fa855 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.320094] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.320226] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.351540] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Successfully updated port: 5c688834-4716-4449-9b44-59b1900fa855 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.368496] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "refresh_cache-9b0b21f7-bdc0-488c-a7fc-234727c26b68" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.368681] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquired lock "refresh_cache-9b0b21f7-bdc0-488c-a7fc-234727c26b68" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.368842] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.441918] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.887752] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Updating instance_info_cache with network_info: [{"id": "5c688834-4716-4449-9b44-59b1900fa855", "address": "fa:16:3e:63:6d:56", "network": {"id": "b7c9dacc-42b2-4ccb-b13c-f3686eb8ec6b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1231707688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e451677222f4beda25d0a45c2d91941", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c688834-47", "ovs_interfaceid": "5c688834-4716-4449-9b44-59b1900fa855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.911432] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Releasing lock "refresh_cache-9b0b21f7-bdc0-488c-a7fc-234727c26b68" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.912028] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Instance network_info: |[{"id": "5c688834-4716-4449-9b44-59b1900fa855", "address": "fa:16:3e:63:6d:56", "network": {"id": "b7c9dacc-42b2-4ccb-b13c-f3686eb8ec6b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1231707688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e451677222f4beda25d0a45c2d91941", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c688834-47", "ovs_interfaceid": "5c688834-4716-4449-9b44-59b1900fa855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 622.912219] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:6d:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6399297e-11b6-47b0-9a9f-712bb90b6ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c688834-4716-4449-9b44-59b1900fa855', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.923992] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Creating folder: Project (4e451677222f4beda25d0a45c2d91941). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 622.924785] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c592fe2a-f0d9-42fc-aa49-fa6602adc5a5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.936432] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Created folder: Project (4e451677222f4beda25d0a45c2d91941) in parent group-v847048. [ 622.936660] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Creating folder: Instances. Parent ref: group-v847077. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 622.936908] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8ad1e4b-5314-4f3a-bd7f-384464111b6a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.950910] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Created folder: Instances in parent group-v847077. [ 622.951588] env[61855]: DEBUG oslo.service.loopingcall [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.952088] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 622.953020] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebe7191d-16e7-4b70-8e41-46ba7841cd5d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.974977] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.974977] env[61855]: value = "task-4302800" [ 622.974977] env[61855]: _type = "Task" [ 622.974977] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.983646] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302800, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.491431] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302800, 'name': CreateVM_Task} progress is 99%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.987834] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302800, 'name': CreateVM_Task, 'duration_secs': 0.549487} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.988560] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 623.988983] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.989087] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.989460] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 623.990059] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd22bd89-4e74-4e49-94af-1ecf798ddafe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.997038] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Waiting for the task: (returnval){ [ 
623.997038] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52fc38ea-39f2-a6c1-6dd3-2bde5c0b2ad1" [ 623.997038] env[61855]: _type = "Task" [ 623.997038] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.008905] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52fc38ea-39f2-a6c1-6dd3-2bde5c0b2ad1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.155816] env[61855]: DEBUG nova.compute.manager [req-34be0347-312f-496f-898d-18db7be94f8e req-6a38f2c5-5891-4ad9-a36c-89a73a2ecdf9 service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Received event network-vif-plugged-5c688834-4716-4449-9b44-59b1900fa855 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 624.156806] env[61855]: DEBUG oslo_concurrency.lockutils [req-34be0347-312f-496f-898d-18db7be94f8e req-6a38f2c5-5891-4ad9-a36c-89a73a2ecdf9 service nova] Acquiring lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.156806] env[61855]: DEBUG oslo_concurrency.lockutils [req-34be0347-312f-496f-898d-18db7be94f8e req-6a38f2c5-5891-4ad9-a36c-89a73a2ecdf9 service nova] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.156806] env[61855]: DEBUG oslo_concurrency.lockutils [req-34be0347-312f-496f-898d-18db7be94f8e req-6a38f2c5-5891-4ad9-a36c-89a73a2ecdf9 service nova] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.156806] env[61855]: DEBUG nova.compute.manager [req-34be0347-312f-496f-898d-18db7be94f8e req-6a38f2c5-5891-4ad9-a36c-89a73a2ecdf9 service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] No waiting events found dispatching network-vif-plugged-5c688834-4716-4449-9b44-59b1900fa855 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 624.157023] env[61855]: WARNING nova.compute.manager [req-34be0347-312f-496f-898d-18db7be94f8e req-6a38f2c5-5891-4ad9-a36c-89a73a2ecdf9 service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Received unexpected event network-vif-plugged-5c688834-4716-4449-9b44-59b1900fa855 for instance with vm_state building and task_state spawning.
[ 624.509697] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.509998] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 624.510213] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.595119] env[61855]: DEBUG nova.compute.manager [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Received event network-changed-5c688834-4716-4449-9b44-59b1900fa855 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 627.596327] env[61855]: DEBUG nova.compute.manager [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Refreshing instance network info cache due to event network-changed-5c688834-4716-4449-9b44-59b1900fa855. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 627.596575] env[61855]: DEBUG oslo_concurrency.lockutils [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] Acquiring lock "refresh_cache-9b0b21f7-bdc0-488c-a7fc-234727c26b68" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.596728] env[61855]: DEBUG oslo_concurrency.lockutils [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] Acquired lock "refresh_cache-9b0b21f7-bdc0-488c-a7fc-234727c26b68" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.596908] env[61855]: DEBUG nova.network.neutron [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Refreshing network info cache for port 5c688834-4716-4449-9b44-59b1900fa855 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 628.248834] env[61855]: DEBUG oslo_concurrency.lockutils [None req-deef5b02-bbeb-4ba1-a138-24d91920eedd tempest-ServersTestManualDisk-1103417019 tempest-ServersTestManualDisk-1103417019-project-member] Acquiring lock "0c391391-3357-41d5-995b-70accf3aa2a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.249368] env[61855]: DEBUG oslo_concurrency.lockutils [None req-deef5b02-bbeb-4ba1-a138-24d91920eedd tempest-ServersTestManualDisk-1103417019 tempest-ServersTestManualDisk-1103417019-project-member] Lock "0c391391-3357-41d5-995b-70accf3aa2a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.824365] env[61855]: DEBUG nova.network.neutron [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Updated VIF entry in instance network info cache for port 5c688834-4716-4449-9b44-59b1900fa855.
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 628.824844] env[61855]: DEBUG nova.network.neutron [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Updating instance_info_cache with network_info: [{"id": "5c688834-4716-4449-9b44-59b1900fa855", "address": "fa:16:3e:63:6d:56", "network": {"id": "b7c9dacc-42b2-4ccb-b13c-f3686eb8ec6b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1231707688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e451677222f4beda25d0a45c2d91941", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c688834-47", "ovs_interfaceid": "5c688834-4716-4449-9b44-59b1900fa855", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.842961] env[61855]: DEBUG oslo_concurrency.lockutils [req-e8a64709-0d14-4c7c-9f6f-8035d7f38dfe req-266016ba-d739-4151-99fb-9dc7688dfcec service nova] Releasing lock "refresh_cache-9b0b21f7-bdc0-488c-a7fc-234727c26b68" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.211848] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2d6e0e1f-966c-4c4f-9973-29b0d4b26c43 tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] Acquiring lock "21b0c6a6-05f7-4d92-9e2e-7ec332a4337a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.211848] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2d6e0e1f-966c-4c4f-9973-29b0d4b26c43 tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] Lock "21b0c6a6-05f7-4d92-9e2e-7ec332a4337a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.417748] env[61855]: DEBUG oslo_concurrency.lockutils [None req-25bbf440-5ad5-40b3-b686-303c3c822b85 tempest-ServersAdminNegativeTestJSON-91051523 tempest-ServersAdminNegativeTestJSON-91051523-project-member] Acquiring lock "a2337e7e-3cc9-4427-99fc-b2990dca9cd0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.418030] env[61855]: DEBUG oslo_concurrency.lockutils [None req-25bbf440-5ad5-40b3-b686-303c3c822b85 tempest-ServersAdminNegativeTestJSON-91051523
tempest-ServersAdminNegativeTestJSON-91051523-project-member] Lock "a2337e7e-3cc9-4427-99fc-b2990dca9cd0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.227444] env[61855]: DEBUG oslo_concurrency.lockutils [None req-78d4bb6c-9018-4520-9cb0-8d9c23f98395 tempest-ImagesOneServerNegativeTestJSON-489246136 tempest-ImagesOneServerNegativeTestJSON-489246136-project-member] Acquiring lock "b4b12e43-e240-4782-9041-8887334e6361" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.229427] env[61855]: DEBUG oslo_concurrency.lockutils [None req-78d4bb6c-9018-4520-9cb0-8d9c23f98395 tempest-ImagesOneServerNegativeTestJSON-489246136 tempest-ImagesOneServerNegativeTestJSON-489246136-project-member] Lock "b4b12e43-e240-4782-9041-8887334e6361" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.488687] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.513074] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.924606] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.924606] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 636.924752] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 636.947830] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.947830] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.947830] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Skipping network cache update for instance because it is Building.
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.947830] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.947830] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.948115] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.948115] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.948115] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.948115] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.948115] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 636.948300] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 636.948300] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.948300] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.356478] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8b7938dc-d9c2-4da6-be61-cb9023404eb9 tempest-ServerActionsTestOtherB-1742061393 tempest-ServerActionsTestOtherB-1742061393-project-member] Acquiring lock "5ae9308d-1e1f-49ab-aafc-022a936d2f15" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.356701] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8b7938dc-d9c2-4da6-be61-cb9023404eb9 tempest-ServerActionsTestOtherB-1742061393 tempest-ServerActionsTestOtherB-1742061393-project-member] Lock "5ae9308d-1e1f-49ab-aafc-022a936d2f15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.923982] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.924451] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.924451] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.924606] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.924761] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 637.924912] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.935923] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.936126] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.936306] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.936440] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 637.937571] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8256dddb-8f59-4433-8153-0c4d8eee1c8a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.949781] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a8b40c-5ab6-447a-a836-71fbf6352795 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.964767] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113d1368-7bb0-46e1-bd71-08120e592673 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.972147] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8dc02a-1ee3-43b2-9d6a-b21a85dafb78 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.003297] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180680MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 638.003463] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.003665] 
env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.101712] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f8c0e060-db09-4279-b39b-42549aa0614d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.101867] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0cacf291-e966-4c3e-8cf7-f664295c2f91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.101997] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d0f9134e-2797-4265-86d8-d68b5d3beb7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.102171] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance fa375b5c-bf96-4f57-a97c-73ef45bbef75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.102248] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e0adc99-63ad-4cca-b300-d67dc2928324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.102364] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.102481] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e21722cc-672b-4f8a-9f78-e50ac83071a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.102592] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 311d492c-0093-4d64-a56f-80fce95b809a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.102707] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6c15201d-7373-4040-9256-84ff11fcfed2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.102827] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 638.129738] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.161590] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.171918] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.182630] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b979c065-ea7c-43bc-8701-fb77b4945ddf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.191709] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f836f443-77b5-41ae-a1c2-1ee5f80885fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.205388] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d61ac621-8140-4a40-8e00-acc041e3e0b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.213790] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e14a79e-f3fa-47f5-afff-8d159c6d8a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.226368] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e454739d-dd42-4cdb-90bb-2bf733688af7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.237417] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 83531c89-23dd-47b8-82dd-f8ec7b95dd4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.248814] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e3a48262-e571-4b12-8f5f-5c8f5f65e5eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.259935] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.270777] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0c391391-3357-41d5-995b-70accf3aa2a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.283806] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 21b0c6a6-05f7-4d92-9e2e-7ec332a4337a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.293179] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a2337e7e-3cc9-4427-99fc-b2990dca9cd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.305289] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b4b12e43-e240-4782-9041-8887334e6361 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.317846] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 5ae9308d-1e1f-49ab-aafc-022a936d2f15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 638.318568] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 638.318841] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 638.738045] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51cf724-30d7-4d5b-a6d1-18e80416e800 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.743840] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7767f9-68ec-48d6-b43b-1dace6f47434 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.776384] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d6a74b-3dee-4bac-87a0-942d9b0ae7de {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.783963] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e2995b-0bd8-4763-aaee-f7c1305027c5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.797029] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.811018] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 638.832454] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 638.832454] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.829s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.586427] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-fd456e19-9a64-4a30-babe-4f48751c4bb9 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Acquiring lock "8fb8bacf-aca4-4696-971e-559f85c002b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.586427] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fd456e19-9a64-4a30-babe-4f48751c4bb9 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "8fb8bacf-aca4-4696-971e-559f85c002b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.105698] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9ed2e9e2-6aa1-4e63-8419-aa21b0a36b27 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] Acquiring lock "3568e9ec-c87c-4831-bd20-d3cfab106e0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.106019] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9ed2e9e2-6aa1-4e63-8419-aa21b0a36b27 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] Lock "3568e9ec-c87c-4831-bd20-d3cfab106e0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.369698] env[61855]: DEBUG oslo_concurrency.lockutils [None req-229300d9-c4e9-4fe6-8c63-30ce2b849c81 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] Acquiring lock "00e8d25c-9c28-47ee-8fd7-8734df1a9a3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.369958] env[61855]: DEBUG oslo_concurrency.lockutils [None req-229300d9-c4e9-4fe6-8c63-30ce2b849c81 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] Lock "00e8d25c-9c28-47ee-8fd7-8734df1a9a3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.407917] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e16987a3-8a9b-453e-a00e-f36127d1a7dd tempest-ServersTestBootFromVolume-1706340137 tempest-ServersTestBootFromVolume-1706340137-project-member] Acquiring lock "b78cec38-feba-4723-b735-1d9afc5edadc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.408251] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e16987a3-8a9b-453e-a00e-f36127d1a7dd tempest-ServersTestBootFromVolume-1706340137 tempest-ServersTestBootFromVolume-1706340137-project-member] Lock "b78cec38-feba-4723-b735-1d9afc5edadc" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.214921] env[61855]: WARNING oslo_vmware.rw_handles [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 665.214921] env[61855]: ERROR oslo_vmware.rw_handles [ 665.214921] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 665.216268] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 665.216520] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Copying Virtual Disk [datastore2] vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/4e22725b-df13-4295-bfb2-b75d4a510e5b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 665.216863] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adb66e0b-65e1-4573-9af9-57016007461f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.224493] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 
tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Waiting for the task: (returnval){ [ 665.224493] env[61855]: value = "task-4302812" [ 665.224493] env[61855]: _type = "Task" [ 665.224493] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.232626] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Task: {'id': task-4302812, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.734547] env[61855]: DEBUG oslo_vmware.exceptions [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 665.734881] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.735966] env[61855]: ERROR nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 665.735966] env[61855]: Faults: ['InvalidArgument'] [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Traceback (most recent call last): [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] yield resources [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self.driver.spawn(context, instance, image_meta, [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self._fetch_image_if_missing(context, vi) [ 665.735966] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] image_cache(vi, tmp_image_ds_loc) [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] vm_util.copy_virtual_disk( [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] session._wait_for_task(vmdk_copy_task) [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] return self.wait_for_task(task_ref) [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] return evt.wait() [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] result = hub.switch() [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 665.736312] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] return self.greenlet.switch() [ 665.736626] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 665.736626] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self.f(*self.args, **self.kw) [ 665.736626] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 665.736626] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] raise exceptions.translate_fault(task_info.error) [ 665.736626] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 665.736626] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Faults: ['InvalidArgument'] [ 665.736626] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] [ 665.736626] env[61855]: INFO nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Terminating instance [ 665.737372] env[61855]: DEBUG 
oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.737571] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.737812] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da135dd0-ba44-42cf-9391-198f53216b0d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.740192] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 665.740384] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 665.741117] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cae6b4-06cb-4714-a203-849eaa7c600e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.747387] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 665.747594] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60c3e29f-fb17-4e0e-8397-602d29b52e81 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.749729] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.749909] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 665.750855] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba9bf0d8-6b87-439d-9b10-a87a2a7d3ae6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.755978] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Waiting for the task: (returnval){ [ 665.755978] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52d45008-dcae-f984-215d-824f9a88052c" [ 665.755978] env[61855]: _type = "Task" [ 665.755978] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.765060] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52d45008-dcae-f984-215d-824f9a88052c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.814509] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 665.814509] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 665.814509] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Deleting the datastore file [datastore2] f8c0e060-db09-4279-b39b-42549aa0614d {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 665.814509] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-531c2ccf-020b-46ee-9c3c-204236d52538 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.820289] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Waiting for the task: (returnval){ [ 665.820289] env[61855]: value = "task-4302814" [ 665.820289] env[61855]: _type = "Task" [ 665.820289] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.827982] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Task: {'id': task-4302814, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.266969] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 666.267272] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Creating directory with path [datastore2] vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 666.267612] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d87200ee-a35b-43bc-ae1b-289e81eb98e3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.278523] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Created directory with path [datastore2] vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 666.278726] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Fetch image to [datastore2] vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 666.278942] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 666.279722] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6d3de6-9f5e-45e1-89ec-47203ac36bdd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.286365] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab93b8a-5a2b-4f6a-8e0d-d1c902345485 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.295553] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11db489b-158c-48cc-ac2b-cf5d9a461a57 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.329448] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-67956503-0d25-4365-845f-a63bd0ca30f2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.336621] env[61855]: DEBUG oslo_vmware.api [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Task: {'id': task-4302814, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07696} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.338061] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.338261] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 666.338435] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 666.338610] env[61855]: INFO nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 666.340446] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ec3a5660-ac2f-42d8-b10a-41bf2a1fe714 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.342372] env[61855]: DEBUG nova.compute.claims [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 666.342556] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.342767] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.364832] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 666.427562] env[61855]: DEBUG oslo_vmware.rw_handles [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 666.490029] env[61855]: DEBUG oslo_vmware.rw_handles [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 666.490029] env[61855]: DEBUG oslo_vmware.rw_handles [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 666.803700] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368664a7-d34c-4fc7-881a-321514c947cd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.811350] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15acd427-c053-4256-90b5-68a31c7c0b11 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.840783] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178552a2-aaa3-46f7-8750-130562239059 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.847576] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6087cf77-6f31-41ea-a627-030a29e2b1f1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.860965] env[61855]: DEBUG nova.compute.provider_tree [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.869380] env[61855]: DEBUG nova.scheduler.client.report [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 666.884782] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.542s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.885306] env[61855]: ERROR nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 666.885306] env[61855]: Faults: ['InvalidArgument'] [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Traceback (most recent call last): [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 666.885306] env[61855]: ERROR 
nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self.driver.spawn(context, instance, image_meta, [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self._fetch_image_if_missing(context, vi) [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] image_cache(vi, tmp_image_ds_loc) [ 666.885306] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] vm_util.copy_virtual_disk( [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] session._wait_for_task(vmdk_copy_task) [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] return self.wait_for_task(task_ref) [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] return evt.wait() [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] result = hub.switch() [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] return self.greenlet.switch() [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 666.885683] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] self.f(*self.args, **self.kw) [ 666.886081] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 666.886081] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] raise exceptions.translate_fault(task_info.error) [ 666.886081] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 666.886081] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Faults: ['InvalidArgument'] [ 666.886081] env[61855]: ERROR nova.compute.manager [instance: f8c0e060-db09-4279-b39b-42549aa0614d] [ 666.886081] env[61855]: DEBUG nova.compute.utils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 666.887701] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Build of instance f8c0e060-db09-4279-b39b-42549aa0614d was re-scheduled: A specified parameter was not correct: fileType [ 666.887701] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 666.888091] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 666.888271] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 666.888427] env[61855]: DEBUG nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 666.888600] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 667.296429] env[61855]: DEBUG nova.network.neutron [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.306495] env[61855]: INFO nova.compute.manager [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] [instance: f8c0e060-db09-4279-b39b-42549aa0614d] Took 0.42 seconds to deallocate network for instance. [ 667.412369] env[61855]: INFO nova.scheduler.client.report [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Deleted allocations for instance f8c0e060-db09-4279-b39b-42549aa0614d [ 667.445139] env[61855]: DEBUG oslo_concurrency.lockutils [None req-238ac9be-640e-49a9-b279-c7b8fc72e596 tempest-ServerDiagnosticsTest-2037318492 tempest-ServerDiagnosticsTest-2037318492-project-member] Lock "f8c0e060-db09-4279-b39b-42549aa0614d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.031s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.462134] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 667.517206] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.517206] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.517206] env[61855]: INFO nova.compute.claims [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.894266] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff03b091-2a11-46a5-a369-5e1fdab81fae {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.901825] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62f2137-9dfb-486d-8042-4a0c64d42370 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.932906] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bbdd19-4aa8-4640-897c-3cf4ccb92073 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.940438] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cc3798-114c-4a84-888b-7b9abc9bfc10 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.954093] env[61855]: DEBUG nova.compute.provider_tree [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.965896] env[61855]: DEBUG nova.scheduler.client.report [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.979531] 
env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.464s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.980076] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 668.015396] env[61855]: DEBUG nova.compute.utils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 668.016884] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 668.017685] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 668.027881] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 668.119714] env[61855]: DEBUG nova.policy [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b56d16bd907143ba97a68e62f9793beb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f895dd916e6347cb93d293fa2ce4f292', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 668.122882] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 668.147233] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 668.147486] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 668.147644] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 668.147826] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 668.147974] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 668.148140] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 668.148346] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 668.148506] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 668.148670] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 668.148837] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 668.149052] env[61855]: DEBUG nova.virt.hardware [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 668.149912] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2815fbec-60b3-47a1-b85d-6f98c6e0e294 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.158685] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4d1ca6-40be-4b8f-b011-9a98863d4355 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.865194] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Successfully created port: f5c0549f-1061-4045-9a51-cf5f7e7e667d {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.696340] env[61855]: DEBUG nova.compute.manager [req-9db9a67b-9629-4eeb-add1-a4416fe78783 req-d10e8e9c-9f00-470c-95d0-79438657011c service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Received event network-vif-plugged-f5c0549f-1061-4045-9a51-cf5f7e7e667d {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 669.696340] env[61855]: DEBUG oslo_concurrency.lockutils [req-9db9a67b-9629-4eeb-add1-a4416fe78783 req-d10e8e9c-9f00-470c-95d0-79438657011c service nova] Acquiring lock "3ae180bd-526d-481f-958b-ca3af96b4406-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.696340] env[61855]: DEBUG oslo_concurrency.lockutils [req-9db9a67b-9629-4eeb-add1-a4416fe78783 req-d10e8e9c-9f00-470c-95d0-79438657011c service nova] Lock "3ae180bd-526d-481f-958b-ca3af96b4406-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.696496] env[61855]: DEBUG oslo_concurrency.lockutils [req-9db9a67b-9629-4eeb-add1-a4416fe78783 req-d10e8e9c-9f00-470c-95d0-79438657011c service nova] Lock "3ae180bd-526d-481f-958b-ca3af96b4406-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.696670] env[61855]: DEBUG nova.compute.manager [req-9db9a67b-9629-4eeb-add1-a4416fe78783 req-d10e8e9c-9f00-470c-95d0-79438657011c service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] No waiting events found dispatching network-vif-plugged-f5c0549f-1061-4045-9a51-cf5f7e7e667d {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 669.696842] env[61855]: WARNING nova.compute.manager [req-9db9a67b-9629-4eeb-add1-a4416fe78783 req-d10e8e9c-9f00-470c-95d0-79438657011c service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Received unexpected event network-vif-plugged-f5c0549f-1061-4045-9a51-cf5f7e7e667d for instance with vm_state building and task_state spawning. [ 669.803012] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Successfully updated port: f5c0549f-1061-4045-9a51-cf5f7e7e667d {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.817531] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "refresh_cache-3ae180bd-526d-481f-958b-ca3af96b4406" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.817684] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquired lock "refresh_cache-3ae180bd-526d-481f-958b-ca3af96b4406" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.817901] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 669.869098] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 670.095366] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Updating instance_info_cache with network_info: [{"id": "f5c0549f-1061-4045-9a51-cf5f7e7e667d", "address": "fa:16:3e:a3:18:7d", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c0549f-10", "ovs_interfaceid": "f5c0549f-1061-4045-9a51-cf5f7e7e667d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.112698] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Releasing lock "refresh_cache-3ae180bd-526d-481f-958b-ca3af96b4406" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.112698] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Instance network_info: |[{"id": "f5c0549f-1061-4045-9a51-cf5f7e7e667d", "address": "fa:16:3e:a3:18:7d", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c0549f-10", "ovs_interfaceid": "f5c0549f-1061-4045-9a51-cf5f7e7e667d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.112948] 
env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:18:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5c0549f-1061-4045-9a51-cf5f7e7e667d', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.119739] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Creating folder: Project (f895dd916e6347cb93d293fa2ce4f292). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.120342] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-482849bf-84df-4f2c-a71c-cadbefde5f94 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.132323] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Created folder: Project (f895dd916e6347cb93d293fa2ce4f292) in parent group-v847048. [ 670.132656] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Creating folder: Instances. Parent ref: group-v847084. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.133152] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-515d8cbc-03dc-4792-ad56-25fd0a7b2be1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.143515] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Created folder: Instances in parent group-v847084. [ 670.144120] env[61855]: DEBUG oslo.service.loopingcall [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.144120] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 670.144222] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94445ec9-3f4f-49de-a300-b996ea8040b7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.163912] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.163912] env[61855]: value = "task-4302817" [ 670.163912] env[61855]: _type = "Task" [ 670.163912] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.172568] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302817, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.674704] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302817, 'name': CreateVM_Task, 'duration_secs': 0.30098} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.674704] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 670.675893] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.675893] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.676021] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 670.676248] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaeff212-ed84-4fbc-bdc2-5d7dccbe32c0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.681199] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Waiting for the task: (returnval){ [ 670.681199] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5289b786-813a-13c2-13b8-d9a476c67205" [ 670.681199] env[61855]: _type = "Task" [ 670.681199] env[61855]: } to 
complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.690139] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5289b786-813a-13c2-13b8-d9a476c67205, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.191775] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.192096] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.192325] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.863283] env[61855]: DEBUG nova.compute.manager [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Received event network-changed-f5c0549f-1061-4045-9a51-cf5f7e7e667d {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 671.863482] env[61855]: DEBUG nova.compute.manager [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Refreshing instance network info cache due to event network-changed-f5c0549f-1061-4045-9a51-cf5f7e7e667d. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 671.863694] env[61855]: DEBUG oslo_concurrency.lockutils [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] Acquiring lock "refresh_cache-3ae180bd-526d-481f-958b-ca3af96b4406" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.863836] env[61855]: DEBUG oslo_concurrency.lockutils [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] Acquired lock "refresh_cache-3ae180bd-526d-481f-958b-ca3af96b4406" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.863995] env[61855]: DEBUG nova.network.neutron [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Refreshing network info cache for port f5c0549f-1061-4045-9a51-cf5f7e7e667d {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 672.247715] env[61855]: DEBUG nova.network.neutron [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Updated VIF entry in instance network info cache for port f5c0549f-1061-4045-9a51-cf5f7e7e667d. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 672.248075] env[61855]: DEBUG nova.network.neutron [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Updating instance_info_cache with network_info: [{"id": "f5c0549f-1061-4045-9a51-cf5f7e7e667d", "address": "fa:16:3e:a3:18:7d", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c0549f-10", "ovs_interfaceid": "f5c0549f-1061-4045-9a51-cf5f7e7e667d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.261144] env[61855]: DEBUG oslo_concurrency.lockutils [req-216d2130-1027-41f3-99ae-78c11e2b9f31 req-0dc79882-f811-4c98-b92c-69e45c3c155f service nova] Releasing lock "refresh_cache-3ae180bd-526d-481f-958b-ca3af96b4406" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.063570] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.063888] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.832151] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.925192] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.925450] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 697.925574] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 697.947908] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948078] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948214] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948341] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948465] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948586] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948709] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948826] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.948955] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.949095] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 697.949278] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 697.949754] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.949926] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.950105] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.950297] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 697.950463] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.962492] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.962667] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.962832] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.962981] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 697.964054] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a21685a-1dc6-4c9a-9e92-0b47bbd8e934 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.974316] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ed7652-665c-40cf-87ef-f256021b4cd1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.988418] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559c331f-fec9-4188-91fa-d657a9d5d7c0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.994852] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6dbcca3-0159-4d90-99cb-b0e9f20c8209 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.024074] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180676MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 698.024229] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.024428] 
env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.101858] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0cacf291-e966-4c3e-8cf7-f664295c2f91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102032] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d0f9134e-2797-4265-86d8-d68b5d3beb7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102167] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance fa375b5c-bf96-4f57-a97c-73ef45bbef75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102294] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e0adc99-63ad-4cca-b300-d67dc2928324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102417] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102537] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e21722cc-672b-4f8a-9f78-e50ac83071a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102656] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 311d492c-0093-4d64-a56f-80fce95b809a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102791] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6c15201d-7373-4040-9256-84ff11fcfed2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.102914] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.103044] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 698.116680] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.127535] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.139353] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b979c065-ea7c-43bc-8701-fb77b4945ddf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.149346] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f836f443-77b5-41ae-a1c2-1ee5f80885fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.160508] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d61ac621-8140-4a40-8e00-acc041e3e0b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.170595] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e14a79e-f3fa-47f5-afff-8d159c6d8a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.180505] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e454739d-dd42-4cdb-90bb-2bf733688af7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.189712] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 83531c89-23dd-47b8-82dd-f8ec7b95dd4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.212741] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e3a48262-e571-4b12-8f5f-5c8f5f65e5eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.222768] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.231789] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0c391391-3357-41d5-995b-70accf3aa2a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.240509] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 21b0c6a6-05f7-4d92-9e2e-7ec332a4337a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.249162] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a2337e7e-3cc9-4427-99fc-b2990dca9cd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.258776] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b4b12e43-e240-4782-9041-8887334e6361 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.269407] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 5ae9308d-1e1f-49ab-aafc-022a936d2f15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.279188] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8fb8bacf-aca4-4696-971e-559f85c002b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.288402] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3568e9ec-c87c-4831-bd20-d3cfab106e0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.298179] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 00e8d25c-9c28-47ee-8fd7-8734df1a9a3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.307689] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b78cec38-feba-4723-b735-1d9afc5edadc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.316639] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.316848] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 698.316994] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 698.658172] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d261ee-f33b-48c3-a044-3d1fbe98b6f9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.665525] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f6f438-f7c5-4d79-9ed7-7c3c67fce663 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.696189] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b271662e-d048-4441-97fd-73af157579db {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.703164] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d46dff-71ec-4e23-8498-79e41f22945d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.715791] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.723780] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 698.741414] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 698.741680] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.717s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.716011] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.716293] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 699.924057] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.812027] env[61855]: WARNING oslo_vmware.rw_handles [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 711.812027] env[61855]: ERROR oslo_vmware.rw_handles [ 711.812027] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 711.814249] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 
tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 711.814249] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Copying Virtual Disk [datastore2] vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/80c679b8-57a1-4c75-8ba4-fc31fdc1a8fb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 711.814249] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b924c64-52fd-40d4-afea-02dae39b1f2e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.822142] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Waiting for the task: (returnval){ [ 711.822142] env[61855]: value = "task-4302818" [ 711.822142] env[61855]: _type = "Task" [ 711.822142] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.830200] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Task: {'id': task-4302818, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.333242] env[61855]: DEBUG oslo_vmware.exceptions [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 712.333498] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.334048] env[61855]: ERROR nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 712.334048] env[61855]: Faults: ['InvalidArgument'] [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Traceback (most recent call last): [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] yield resources [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self.driver.spawn(context, instance, image_meta, [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self._fetch_image_if_missing(context, vi) [ 712.334048] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] image_cache(vi, tmp_image_ds_loc) [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] vm_util.copy_virtual_disk( [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] session._wait_for_task(vmdk_copy_task) [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] return self.wait_for_task(task_ref) [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] return evt.wait() [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] result = hub.switch() [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 712.334387] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] return self.greenlet.switch() [ 712.334751] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 712.334751] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self.f(*self.args, **self.kw) [ 712.334751] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 712.334751] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] raise exceptions.translate_fault(task_info.error) [ 712.334751] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 712.334751] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Faults: ['InvalidArgument'] [ 712.334751] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] [ 712.334751] env[61855]: INFO nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Terminating instance [ 712.335890] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.336115] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.336716] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 
tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 712.336903] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 712.337140] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62f9b3b5-7d7e-4c02-bb01-bc0f53216910 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.339602] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e4f6da-bba8-4cbd-819e-fb81fa4fcf6a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.346553] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 712.346765] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-241d8597-81a9-4268-84f0-0729424a4e05 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.348900] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.349089] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 712.350158] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42477f12-5c9f-4f5c-93fc-79f111eb6343 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.354727] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for the task: (returnval){ [ 712.354727] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c79e0c-58e7-2b28-8d31-a6d0e5c36bf5" [ 712.354727] env[61855]: _type = "Task" [ 712.354727] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.364528] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c79e0c-58e7-2b28-8d31-a6d0e5c36bf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.422843] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 712.423832] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 712.423832] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Deleting the datastore file [datastore2] 0cacf291-e966-4c3e-8cf7-f664295c2f91 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 712.423832] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b3057a6-7534-4448-b9e3-1175dca20508 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.429928] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Waiting for the task: (returnval){ [ 712.429928] env[61855]: value = "task-4302820" [ 712.429928] env[61855]: _type = "Task" [ 712.429928] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.437697] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Task: {'id': task-4302820, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.865263] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 712.865608] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Creating directory with path [datastore2] vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.865856] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0405ba38-efe1-449d-b853-14825481fc97 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.877510] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Created directory with path [datastore2] vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.877716] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Fetch image to [datastore2] vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 712.877889] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 712.878662] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265c86f8-7190-44fa-8f5d-24d30ea5277f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.885073] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ccbede-0788-4b58-b343-6c82368d7452 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.893936] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7441cd2e-bcf6-45a4-82be-2a6d07d3cc5c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.925627] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526c12ea-4410-4ef8-a350-8e85cb194364 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.933890] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3ebc04d8-0689-496b-a519-1771a4de7d49 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.940099] env[61855]: DEBUG oslo_vmware.api [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Task: {'id': task-4302820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071508} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.940331] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 712.940512] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 712.940684] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 712.940856] env[61855]: INFO nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Took 0.60 seconds to destroy the instance on the hypervisor. 
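The sequence above is the oslo.vmware task pattern end to end: a vSphere *_Task method is invoked (CopyVirtualDisk_Task, then DeleteDatastoreFile_Task during cleanup), wait_for_task polls the TaskInfo (the api.py:397/434 entries), and a task-level fault surfaces as VimFaultException, here "A specified parameter was not correct: fileType" with Faults: ['InvalidArgument']. Below is a minimal sketch of that call shape against oslo.vmware's public API; the endpoint, credentials, and datastore paths are placeholders, and this is not Nova's own code path (Nova goes through vm_util.copy_virtual_disk and also passes datacenter references).

    # Minimal sketch, not Nova's implementation: the invoke/wait pattern
    # behind the "Invoking VirtualDiskManager.CopyVirtualDisk_Task" and
    # "Waiting for the task ... to complete" entries above. Endpoint,
    # credentials, and datastore paths are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',         # placeholder vCenter
        api_retry_count=10, task_poll_interval=0.5)  # _poll_task interval

    disk_mgr = session.vim.service_content.virtualDiskManager
    try:
        # Returns a Task moref (the log's "task-4302818"); Nova additionally
        # passes sourceDatacenter/destDatacenter refs, omitted here.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
            destName='[datastore2] vmware_temp/example/example.vmdk')
        session.wait_for_task(task)  # polls until success or a task fault
    except vexc.VimFaultException as e:
        # The failure above arrives this way: e.fault_list == ['InvalidArgument'],
        # message "A specified parameter was not correct: fileType".
        print(e.fault_list, e)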
[ 712.943538] env[61855]: DEBUG nova.compute.claims [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 712.943799] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.943942] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.964021] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 713.024180] env[61855]: DEBUG oslo_vmware.rw_handles [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 713.084825] env[61855]: DEBUG oslo_vmware.rw_handles [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 713.085031] env[61855]: DEBUG oslo_vmware.rw_handles [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 713.397090] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff583f6-ed68-45cc-9993-34159a7db14e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.405678] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c677efc1-c28d-44e0-96f1-171e9d015e5f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.434969] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173240c8-1906-44f1-a87a-e9143b3f737b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.442122] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d415779-1a29-48f4-ba0b-108aa345f4dc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.455029] env[61855]: DEBUG nova.compute.provider_tree [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.463708] env[61855]: DEBUG nova.scheduler.client.report [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 713.479465] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.535s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.480041] env[61855]: ERROR nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 713.480041] env[61855]: Faults: ['InvalidArgument'] [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Traceback (most recent call last): [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self.driver.spawn(context, instance, image_meta, [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self._fetch_image_if_missing(context, vi) [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] image_cache(vi, tmp_image_ds_loc) [ 713.480041] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] vm_util.copy_virtual_disk( [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] session._wait_for_task(vmdk_copy_task) [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] return self.wait_for_task(task_ref) [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] return evt.wait() [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] result = hub.switch() [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] return self.greenlet.switch() [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 713.480436] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] self.f(*self.args, **self.kw) [ 713.480784] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 713.480784] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] raise exceptions.translate_fault(task_info.error) [ 713.480784] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 713.480784] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Faults: ['InvalidArgument'] [ 713.480784] env[61855]: ERROR nova.compute.manager [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] [ 713.480784] env[61855]: DEBUG nova.compute.utils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 713.482243] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Build of instance 0cacf291-e966-4c3e-8cf7-f664295c2f91 was re-scheduled: A specified parameter was not correct: fileType [ 713.482243] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 713.482609] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 713.482783] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 713.482941] env[61855]: DEBUG nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 713.483116] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 713.823293] env[61855]: DEBUG nova.network.neutron [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.834455] env[61855]: INFO nova.compute.manager [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 0cacf291-e966-4c3e-8cf7-f664295c2f91] Took 0.35 seconds to deallocate network for instance. [ 713.941189] env[61855]: INFO nova.scheduler.client.report [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Deleted allocations for instance 0cacf291-e966-4c3e-8cf7-f664295c2f91 [ 713.969886] env[61855]: DEBUG oslo_concurrency.lockutils [None req-841dd827-2c66-43bc-b4d4-53dbb2fbba75 tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "0cacf291-e966-4c3e-8cf7-f664295c2f91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.033s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.003419] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 714.071963] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.072254] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.073849] env[61855]: INFO nova.compute.claims [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.454163] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72831225-9317-46b4-86e0-766a6b1167da {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.462033] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e8a7ef-36a6-4a04-822c-5438283ac5eb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.491158] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5dc4b7-ccc0-4e9b-a86a-4d5c5d6ef870 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.498249] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88dbb1d-0650-43c5-887d-bcab3787cc44 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.513084] env[61855]: DEBUG nova.compute.provider_tree [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.521823] env[61855]: DEBUG nova.scheduler.client.report [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 714.539209] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.467s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.539725] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 714.575169] env[61855]: DEBUG nova.compute.utils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 714.575169] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 714.575169] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 714.585242] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 714.652295] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 714.657604] env[61855]: DEBUG nova.policy [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b536bb0e0bd84372ba005793bc90fe8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03caace395334d40bb967531fd39c9b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 714.683475] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 714.683712] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 714.683867] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.684074] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 714.684220] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.684565] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 714.684565] env[61855]: DEBUG nova.virt.hardware 
[None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 714.684709] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 714.684872] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 714.685300] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 714.685356] env[61855]: DEBUG nova.virt.hardware [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 714.686185] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5bb6c6-cc5e-4ab5-8e3f-d5fbf6b69d68 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.697278] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7ba29d-c6fd-4ccc-8157-990ca306da17 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.147805] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Successfully created port: 028381be-c003-4802-b693-75fbeab56e0b {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.859383] env[61855]: DEBUG oslo_concurrency.lockutils [None req-12cd4ca2-e3e6-494b-9daf-05d6757feeca tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Acquiring lock "850493b3-7c3d-4b01-a807-bc4cacb0cb5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.859383] env[61855]: DEBUG oslo_concurrency.lockutils [None req-12cd4ca2-e3e6-494b-9daf-05d6757feeca tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "850493b3-7c3d-4b01-a807-bc4cacb0cb5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.222834] env[61855]: DEBUG nova.compute.manager [req-0786d5d8-e37d-46f0-b41e-cb68caca0a22 req-395f100f-4de5-4816-862d-18f2989c5546 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Received event network-vif-plugged-028381be-c003-4802-b693-75fbeab56e0b {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 716.223074] env[61855]: DEBUG oslo_concurrency.lockutils [req-0786d5d8-e37d-46f0-b41e-cb68caca0a22 req-395f100f-4de5-4816-862d-18f2989c5546 service nova] Acquiring lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.223572] env[61855]: DEBUG oslo_concurrency.lockutils [req-0786d5d8-e37d-46f0-b41e-cb68caca0a22 req-395f100f-4de5-4816-862d-18f2989c5546 service nova] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.223776] env[61855]: DEBUG oslo_concurrency.lockutils [req-0786d5d8-e37d-46f0-b41e-cb68caca0a22 req-395f100f-4de5-4816-862d-18f2989c5546 service nova] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.225087] env[61855]: DEBUG nova.compute.manager [req-0786d5d8-e37d-46f0-b41e-cb68caca0a22 req-395f100f-4de5-4816-862d-18f2989c5546 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] No waiting events found dispatching network-vif-plugged-028381be-c003-4802-b693-75fbeab56e0b {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 716.225528] env[61855]: WARNING nova.compute.manager [req-0786d5d8-e37d-46f0-b41e-cb68caca0a22 req-395f100f-4de5-4816-862d-18f2989c5546 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Received unexpected event network-vif-plugged-028381be-c003-4802-b693-75fbeab56e0b for instance with vm_state building and task_state spawning. 
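Each Acquiring / acquired / released triplet above is oslo.concurrency bookkeeping: the decorator form logs from lockutils.py:402/407/421 (the "compute_resources" and "...-events" locks), the context-manager form from lockutils.py:310/313/331 (the "refresh_cache-..." and image-cache locks). A minimal sketch of both forms follows, with lock names copied from the log purely for illustration; the fair=True flag is an assumption modeled on Nova's resource-tracker lock.

    # Minimal sketch with illustrative names: the lock bookkeeping lines
    # above are emitted by oslo.concurrency's lockutils wrappers.
    import logging
    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources', fair=True)
    def claim_resources():
        # By the time this body runs, lockutils has logged
        # 'Acquiring lock "compute_resources" by ...' and
        # 'Lock "compute_resources" acquired by ... :: waited N s';
        # on return it logs ':: held N s' (lockutils.py:402/407/421).
        pass

    # Context-manager form, as used around the network info cache refresh
    # ('Acquiring lock "refresh_cache-..."', lockutils.py:310/313/331):
    with lockutils.lock('refresh_cache-0519cba4-d9b1-4f54-b889-2c09d2d26b14'):
        pass

    claim_resources()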
[ 716.227956] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Successfully updated port: 028381be-c003-4802-b693-75fbeab56e0b {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.239687] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "refresh_cache-0519cba4-d9b1-4f54-b889-2c09d2d26b14" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.239938] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquired lock "refresh_cache-0519cba4-d9b1-4f54-b889-2c09d2d26b14" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.239995] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.306268] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.549692] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Updating instance_info_cache with network_info: [{"id": "028381be-c003-4802-b693-75fbeab56e0b", "address": "fa:16:3e:54:3a:34", "network": {"id": "dd892e75-96b1-4d19-b62e-6eaa2c2864d1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-767404633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03caace395334d40bb967531fd39c9b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028381be-c0", "ovs_interfaceid": "028381be-c003-4802-b693-75fbeab56e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.564079] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Releasing lock "refresh_cache-0519cba4-d9b1-4f54-b889-2c09d2d26b14" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.564384] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Instance network_info: |[{"id": "028381be-c003-4802-b693-75fbeab56e0b", "address": "fa:16:3e:54:3a:34", "network": {"id": "dd892e75-96b1-4d19-b62e-6eaa2c2864d1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-767404633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03caace395334d40bb967531fd39c9b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028381be-c0", "ovs_interfaceid": "028381be-c003-4802-b693-75fbeab56e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 716.564787] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:3a:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc6e6fe1-c4f5-4389-a49f-0978060eebb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '028381be-c003-4802-b693-75fbeab56e0b', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.573175] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Creating folder: Project (03caace395334d40bb967531fd39c9b7). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 716.573744] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52bfd841-4dc7-4833-ba61-e7a415405b68 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.585638] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Created folder: Project (03caace395334d40bb967531fd39c9b7) in parent group-v847048. [ 716.585638] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Creating folder: Instances. Parent ref: group-v847087. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 716.585638] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-457ac293-0599-4995-b414-96497da6070f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.593010] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Created folder: Instances in parent group-v847087. [ 716.593283] env[61855]: DEBUG oslo.service.loopingcall [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 716.593512] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 716.593738] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7011ab1-c2b9-4553-adfe-1fa8c48ffff0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.612428] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.612428] env[61855]: value = "task-4302823" [ 716.612428] env[61855]: _type = "Task" [ 716.612428] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.621160] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302823, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.122927] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302823, 'name': CreateVM_Task, 'duration_secs': 0.326467} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.123997] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 717.123997] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.123997] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.124355] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 717.124600] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf750aa8-c81c-4267-bed2-93fdfb9f9671 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.129377] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Waiting for the task: (returnval){ [ 717.129377] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5295e5c5-031b-94be-e3eb-7b0951bf1a4f" [ 717.129377] env[61855]: _type = "Task" [ 717.129377] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.137020] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5295e5c5-031b-94be-e3eb-7b0951bf1a4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.639382] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.639693] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.639907] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.253983] env[61855]: DEBUG nova.compute.manager [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Received event network-changed-028381be-c003-4802-b693-75fbeab56e0b {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 718.254286] env[61855]: DEBUG nova.compute.manager [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Refreshing instance network info cache due to event network-changed-028381be-c003-4802-b693-75fbeab56e0b. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 718.254515] env[61855]: DEBUG oslo_concurrency.lockutils [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] Acquiring lock "refresh_cache-0519cba4-d9b1-4f54-b889-2c09d2d26b14" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.254669] env[61855]: DEBUG oslo_concurrency.lockutils [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] Acquired lock "refresh_cache-0519cba4-d9b1-4f54-b889-2c09d2d26b14" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.254834] env[61855]: DEBUG nova.network.neutron [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Refreshing network info cache for port 028381be-c003-4802-b693-75fbeab56e0b {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 718.589805] env[61855]: DEBUG nova.network.neutron [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Updated VIF entry in instance network info cache for port 028381be-c003-4802-b693-75fbeab56e0b. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 718.590170] env[61855]: DEBUG nova.network.neutron [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Updating instance_info_cache with network_info: [{"id": "028381be-c003-4802-b693-75fbeab56e0b", "address": "fa:16:3e:54:3a:34", "network": {"id": "dd892e75-96b1-4d19-b62e-6eaa2c2864d1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-767404633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03caace395334d40bb967531fd39c9b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc6e6fe1-c4f5-4389-a49f-0978060eebb4", "external-id": "nsx-vlan-transportzone-829", "segmentation_id": 829, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028381be-c0", "ovs_interfaceid": "028381be-c003-4802-b693-75fbeab56e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.603276] env[61855]: DEBUG oslo_concurrency.lockutils [req-4a2c3557-2e88-4f8c-b627-bc2f9f69f9d2 req-3ad89f38-011d-459b-9b2f-c73725b2bcf0 service nova] Releasing lock "refresh_cache-0519cba4-d9b1-4f54-b889-2c09d2d26b14" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.923850] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.924693] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.925442] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 758.919542] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.920011] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.943410] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.944457] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.944457] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.957832] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.958036] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.958270] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.958439] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 758.959535] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6365d33-f772-43ab-96d7-b37ba07272d6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.968986] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcb7a60-8035-40cb-a21e-ff6c83f1d7b7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.982853] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efebf26e-d997-4dd2-aab9-a6dc7fd669f2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.988692] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e5137d-2edf-4596-adcf-65333040c04d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.019569] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180671MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 759.019569] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.019569] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.096016] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d0f9134e-2797-4265-86d8-d68b5d3beb7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.096227] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance fa375b5c-bf96-4f57-a97c-73ef45bbef75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.096361] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e0adc99-63ad-4cca-b300-d67dc2928324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.096484] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.096596] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e21722cc-672b-4f8a-9f78-e50ac83071a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.096714] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 311d492c-0093-4d64-a56f-80fce95b809a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.096839] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6c15201d-7373-4040-9256-84ff11fcfed2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.096955] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.097127] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.097183] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 759.108321] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.118690] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b979c065-ea7c-43bc-8701-fb77b4945ddf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.129154] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f836f443-77b5-41ae-a1c2-1ee5f80885fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.138680] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d61ac621-8140-4a40-8e00-acc041e3e0b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.148302] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e14a79e-f3fa-47f5-afff-8d159c6d8a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.158541] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e454739d-dd42-4cdb-90bb-2bf733688af7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.170079] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 83531c89-23dd-47b8-82dd-f8ec7b95dd4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.182547] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e3a48262-e571-4b12-8f5f-5c8f5f65e5eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.192411] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.202510] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0c391391-3357-41d5-995b-70accf3aa2a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.212088] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 21b0c6a6-05f7-4d92-9e2e-7ec332a4337a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.221932] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a2337e7e-3cc9-4427-99fc-b2990dca9cd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.231796] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b4b12e43-e240-4782-9041-8887334e6361 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.241537] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 5ae9308d-1e1f-49ab-aafc-022a936d2f15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.251377] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8fb8bacf-aca4-4696-971e-559f85c002b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.261543] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3568e9ec-c87c-4831-bd20-d3cfab106e0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.272439] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 00e8d25c-9c28-47ee-8fd7-8734df1a9a3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.283527] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b78cec38-feba-4723-b735-1d9afc5edadc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.294036] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.304454] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 850493b3-7c3d-4b01-a807-bc4cacb0cb5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.304807] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 759.304914] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 759.656571] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fa4c77-6e59-4012-96e5-ace678c389b2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.664439] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd386fe2-fbbb-4f0e-81a4-cba153f413bf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.693459] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7632c9a7-9d80-4946-bb75-bcaa601dfcb5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.700588] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31a3544-3183-4953-ba3e-1b8be811957d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.713169] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: 
a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.722061] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.736064] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 759.736064] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.717s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.717013] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.717378] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 760.717378] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 760.738182] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.738389] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.738528] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.738662] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.738788] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.738910] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.739049] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.739174] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.739554] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.739693] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 760.739818] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 760.740415] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.740595] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.610414] env[61855]: WARNING oslo_vmware.rw_handles [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 762.610414] env[61855]: ERROR oslo_vmware.rw_handles [ 762.611069] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 762.612910] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 762.613304] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Copying Virtual Disk [datastore2] vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/232813f3-40b0-4087-8e85-dfc768430f84/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) 
copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 762.613616] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ec91765-af93-4713-9008-0a2b3559b423 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.621822] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for the task: (returnval){ [ 762.621822] env[61855]: value = "task-4302824" [ 762.621822] env[61855]: _type = "Task" [ 762.621822] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.630831] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': task-4302824, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.133013] env[61855]: DEBUG oslo_vmware.exceptions [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 763.133013] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.133416] env[61855]: ERROR nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 763.133416] env[61855]: Faults: ['InvalidArgument'] [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Traceback (most recent call last): [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] yield resources [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] self.driver.spawn(context, instance, image_meta, [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: 
d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] self._fetch_image_if_missing(context, vi) [ 763.133416] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] image_cache(vi, tmp_image_ds_loc) [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] vm_util.copy_virtual_disk( [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] session._wait_for_task(vmdk_copy_task) [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] return self.wait_for_task(task_ref) [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] return evt.wait() [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] result = hub.switch() [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 763.133930] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] return self.greenlet.switch() [ 763.134390] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 763.134390] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] self.f(*self.args, **self.kw) [ 763.134390] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 763.134390] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] raise exceptions.translate_fault(task_info.error) [ 763.134390] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 763.134390] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Faults: ['InvalidArgument'] [ 
763.134390] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] [ 763.134390] env[61855]: INFO nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Terminating instance [ 763.135324] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.135538] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.136177] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 763.136372] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 763.136595] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fceea61-93fa-42e3-9bd1-709194bfa019 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.138904] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1568d9e6-aaf8-424b-a9df-a641033ba2cc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.146247] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 763.147234] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbd0ce92-3c3f-4d42-8597-3bb895e84310 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.148610] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.148779] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 
tempest-ImagesOneServerTestJSON-1175515355-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 763.149442] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4921717-4107-4d9b-bf95-b5eb2a9b5ae3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.154818] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Waiting for the task: (returnval){ [ 763.154818] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524d8fcc-c50c-b195-51d5-1a0b628da025" [ 763.154818] env[61855]: _type = "Task" [ 763.154818] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.165206] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524d8fcc-c50c-b195-51d5-1a0b628da025, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.218242] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 763.218472] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 763.218656] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Deleting the datastore file [datastore2] d0f9134e-2797-4265-86d8-d68b5d3beb7a {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 763.218966] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66e794d7-a7d9-40cc-953e-81351a5279ad {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.224685] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for the task: (returnval){ [ 763.224685] env[61855]: value = "task-4302826" [ 763.224685] env[61855]: _type = "Task" [ 763.224685] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.232048] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': task-4302826, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.665030] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 763.665344] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Creating directory with path [datastore2] vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.665394] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-260221fa-72c2-47b8-a9ca-bd0358d26682 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.676566] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Created directory with path [datastore2] vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.676760] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Fetch image to [datastore2] vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 763.676929] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 763.677689] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e05826-e865-4bbd-8eb5-f2d3ffe96085 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.684274] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c06475d-a465-4bfe-af12-afc320b660ea {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.693210] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b96f9a-33c3-42a6-9cce-5482a29a1bf0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.723507] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9e24426d-d38a-46e3-a624-c35d626ae653 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.735037] env[61855]: DEBUG oslo_vmware.api [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': task-4302826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077337} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.735518] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.735706] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 763.735878] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 763.736061] env[61855]: INFO nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 763.737626] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d778011c-362e-4689-b191-015e2a1ed437 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.739516] env[61855]: DEBUG nova.compute.claims [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 763.739701] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.739910] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.760178] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 763.817454] env[61855]: DEBUG oslo_vmware.rw_handles [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 763.875951] env[61855]: DEBUG oslo_vmware.rw_handles [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 763.877362] env[61855]: DEBUG oslo_vmware.rw_handles [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 764.199447] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62943833-fda9-4760-be65-c85363c6bb82 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.207072] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205c0d61-2fac-4765-aff9-6af4be218729 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.235923] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04941109-4c47-410c-a0ca-7dbdb41bf594 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.243022] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f6066b-aeeb-4a0d-a5be-3182bf672915 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.255825] env[61855]: DEBUG nova.compute.provider_tree [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.264298] env[61855]: DEBUG nova.scheduler.client.report [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 764.279047] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.539s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.279591] env[61855]: ERROR nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 764.279591] env[61855]: Faults: ['InvalidArgument'] [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Traceback (most recent call last): [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: 
d0f9134e-2797-4265-86d8-d68b5d3beb7a] self.driver.spawn(context, instance, image_meta, [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] self._fetch_image_if_missing(context, vi) [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] image_cache(vi, tmp_image_ds_loc) [ 764.279591] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] vm_util.copy_virtual_disk( [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] session._wait_for_task(vmdk_copy_task) [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] return self.wait_for_task(task_ref) [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] return evt.wait() [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] result = hub.switch() [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] return self.greenlet.switch() [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 764.279970] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] self.f(*self.args, **self.kw) [ 764.280335] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 764.280335] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] raise exceptions.translate_fault(task_info.error) [ 764.280335] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 764.280335] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Faults: ['InvalidArgument'] [ 764.280335] env[61855]: ERROR nova.compute.manager [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] [ 764.280335] env[61855]: DEBUG nova.compute.utils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 764.281683] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Build of instance d0f9134e-2797-4265-86d8-d68b5d3beb7a was re-scheduled: A specified parameter was not correct: fileType [ 764.281683] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 764.282082] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 764.282279] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 764.282401] env[61855]: DEBUG nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 764.282571] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 764.697862] env[61855]: DEBUG nova.network.neutron [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.713743] env[61855]: INFO nova.compute.manager [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: d0f9134e-2797-4265-86d8-d68b5d3beb7a] Took 0.43 seconds to deallocate network for instance. [ 764.815423] env[61855]: INFO nova.scheduler.client.report [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Deleted allocations for instance d0f9134e-2797-4265-86d8-d68b5d3beb7a [ 764.842129] env[61855]: DEBUG oslo_concurrency.lockutils [None req-552ea402-f522-424e-8da3-83644380b68c tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "d0f9134e-2797-4265-86d8-d68b5d3beb7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.498s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.854085] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 764.914805] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.915084] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.916671] env[61855]: INFO nova.compute.claims [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.316789] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73af623-0218-4a89-b979-4ae711f079e7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.324865] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912c76f2-5e9e-49bd-bf7c-35e31434aabf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.354439] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98db4d8-7aa3-43d7-9d8f-3cb7d8ee5c7b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.362054] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c912976-b9c3-4b15-8ad3-c90d136b1584 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.376186] env[61855]: DEBUG nova.compute.provider_tree [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.385608] env[61855]: DEBUG nova.scheduler.client.report [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.406348] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 
tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.491s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.407016] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 765.441636] env[61855]: DEBUG nova.compute.utils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 765.442910] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 765.443113] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 765.452266] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 765.514961] env[61855]: DEBUG nova.policy [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5256e05619f744e988b78876f04b7286', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a6c123dab04b01868b291d2b953e75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 765.518433] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 765.543376] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 765.543599] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 765.543762] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.543947] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 765.544109] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.544263] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 765.544486] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 765.544638] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 765.544805] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 
tempest-ServersTestJSON-1669392497-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 765.544968] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 765.545158] env[61855]: DEBUG nova.virt.hardware [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.546012] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb36a9e7-d9fb-456e-8098-08c3afb160d2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.554613] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d8c621-a363-40a5-a521-bfa9ae8a8827 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.908849] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Successfully created port: 0cd60071-9c40-4071-ac6f-a0f15c26a194 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.789730] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Successfully updated port: 0cd60071-9c40-4071-ac6f-a0f15c26a194 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 766.811217] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "refresh_cache-9c9d0334-be7e-466a-ab2c-cc3fbe82d756" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.811590] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "refresh_cache-9c9d0334-be7e-466a-ab2c-cc3fbe82d756" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.811829] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.890393] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.146471] env[61855]: DEBUG nova.compute.manager [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Received event network-vif-plugged-0cd60071-9c40-4071-ac6f-a0f15c26a194 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 767.146703] env[61855]: DEBUG oslo_concurrency.lockutils [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] Acquiring lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.147208] env[61855]: DEBUG oslo_concurrency.lockutils [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.147406] env[61855]: DEBUG oslo_concurrency.lockutils [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.147581] env[61855]: DEBUG nova.compute.manager [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] No waiting events found dispatching network-vif-plugged-0cd60071-9c40-4071-ac6f-a0f15c26a194 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 767.147756] env[61855]: WARNING nova.compute.manager [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Received unexpected event network-vif-plugged-0cd60071-9c40-4071-ac6f-a0f15c26a194 for instance with vm_state building and task_state spawning. [ 767.147924] env[61855]: DEBUG nova.compute.manager [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Received event network-changed-0cd60071-9c40-4071-ac6f-a0f15c26a194 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 767.148110] env[61855]: DEBUG nova.compute.manager [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Refreshing instance network info cache due to event network-changed-0cd60071-9c40-4071-ac6f-a0f15c26a194. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 767.148273] env[61855]: DEBUG oslo_concurrency.lockutils [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] Acquiring lock "refresh_cache-9c9d0334-be7e-466a-ab2c-cc3fbe82d756" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.148785] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.148980] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.154424] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Updating instance_info_cache with network_info: [{"id": "0cd60071-9c40-4071-ac6f-a0f15c26a194", "address": "fa:16:3e:6d:3e:88", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cd60071-9c", "ovs_interfaceid": "0cd60071-9c40-4071-ac6f-a0f15c26a194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.165667] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "refresh_cache-9c9d0334-be7e-466a-ab2c-cc3fbe82d756" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.166368] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Instance network_info: |[{"id": 
"0cd60071-9c40-4071-ac6f-a0f15c26a194", "address": "fa:16:3e:6d:3e:88", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cd60071-9c", "ovs_interfaceid": "0cd60071-9c40-4071-ac6f-a0f15c26a194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 767.166551] env[61855]: DEBUG oslo_concurrency.lockutils [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] Acquired lock "refresh_cache-9c9d0334-be7e-466a-ab2c-cc3fbe82d756" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.166737] env[61855]: DEBUG nova.network.neutron [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Refreshing network info cache for port 0cd60071-9c40-4071-ac6f-a0f15c26a194 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 767.167791] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:3e:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f925dc8-2145-457e-a4d4-c07117356dd0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0cd60071-9c40-4071-ac6f-a0f15c26a194', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.176400] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating folder: Project (91a6c123dab04b01868b291d2b953e75). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 767.179878] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-629b1db5-8e0d-4d65-988b-aaf5fa08fb7b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.192736] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created folder: Project (91a6c123dab04b01868b291d2b953e75) in parent group-v847048. 
[ 767.192736] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating folder: Instances. Parent ref: group-v847090. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 767.192736] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cabd1ce-be18-4bf1-a7fc-8c9f69d01f02 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.204661] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created folder: Instances in parent group-v847090. [ 767.204661] env[61855]: DEBUG oslo.service.loopingcall [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.204661] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 767.204661] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4413ec2a-8bb3-490a-8d9d-aebdac4cbf1c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.224168] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.224168] env[61855]: value = "task-4302829" [ 767.224168] env[61855]: _type = "Task" [ 767.224168] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.232213] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302829, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.722656] env[61855]: DEBUG nova.network.neutron [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Updated VIF entry in instance network info cache for port 0cd60071-9c40-4071-ac6f-a0f15c26a194. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 767.722952] env[61855]: DEBUG nova.network.neutron [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Updating instance_info_cache with network_info: [{"id": "0cd60071-9c40-4071-ac6f-a0f15c26a194", "address": "fa:16:3e:6d:3e:88", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cd60071-9c", "ovs_interfaceid": "0cd60071-9c40-4071-ac6f-a0f15c26a194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.741363] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302829, 'name': CreateVM_Task, 'duration_secs': 0.302706} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.742104] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 767.742529] env[61855]: DEBUG oslo_concurrency.lockutils [req-8ba3ff54-8c35-4351-9691-be39caae5060 req-83877ec3-a4d4-4ee1-89dd-c7c0d91835d7 service nova] Releasing lock "refresh_cache-9c9d0334-be7e-466a-ab2c-cc3fbe82d756" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.743216] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.743392] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.743674] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 767.744150] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6698188-842a-4091-b9f0-b6fad84c75dd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.749140] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 767.749140] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5257b57c-36d6-1f2d-0ca0-d198a2312c7d" [ 767.749140] env[61855]: _type = "Task" [ 767.749140] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.757459] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5257b57c-36d6-1f2d-0ca0-d198a2312c7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.261031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.261421] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.261695] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.715316] env[61855]: DEBUG oslo_concurrency.lockutils [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.133139] env[61855]: DEBUG oslo_concurrency.lockutils [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "6e0adc99-63ad-4cca-b300-d67dc2928324" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.593402] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 
tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "e21722cc-672b-4f8a-9f78-e50ac83071a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.245950] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "311d492c-0093-4d64-a56f-80fce95b809a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.560794] env[61855]: DEBUG oslo_concurrency.lockutils [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "6c15201d-7373-4040-9256-84ff11fcfed2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.197800] env[61855]: DEBUG oslo_concurrency.lockutils [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.826082] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "3ae180bd-526d-481f-958b-ca3af96b4406" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.314483] env[61855]: DEBUG oslo_concurrency.lockutils [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.732031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.454736] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.454736] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.851060] env[61855]: WARNING oslo_vmware.rw_handles [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 811.851060] env[61855]: ERROR oslo_vmware.rw_handles [ 811.853180] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 811.857530] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 811.858276] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Copying Virtual Disk [datastore2] vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/2fa84b5e-f478-4bcf-9ef1-b76fbc11629a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 811.858713] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2f4edf9-a563-4c63-87c7-9ff2d56b36f7 
{{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.873027] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Waiting for the task: (returnval){ [ 811.873027] env[61855]: value = "task-4302830" [ 811.873027] env[61855]: _type = "Task" [ 811.873027] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.879966] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Task: {'id': task-4302830, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.383712] env[61855]: DEBUG oslo_vmware.exceptions [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 812.384076] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.384870] env[61855]: ERROR nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.384870] env[61855]: Faults: ['InvalidArgument'] [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Traceback (most recent call last): [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] yield resources [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] self.driver.spawn(context, instance, image_meta, [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: 
fa375b5c-bf96-4f57-a97c-73ef45bbef75] self._fetch_image_if_missing(context, vi) [ 812.384870] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] image_cache(vi, tmp_image_ds_loc) [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] vm_util.copy_virtual_disk( [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] session._wait_for_task(vmdk_copy_task) [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] return self.wait_for_task(task_ref) [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] return evt.wait() [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] result = hub.switch() [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 812.385430] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] return self.greenlet.switch() [ 812.385989] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 812.385989] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] self.f(*self.args, **self.kw) [ 812.385989] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 812.385989] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] raise exceptions.translate_fault(task_info.error) [ 812.385989] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.385989] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Faults: ['InvalidArgument'] [ 812.385989] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] [ 812.385989] env[61855]: INFO nova.compute.manager [None 
req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Terminating instance [ 812.388460] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 812.388703] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.389495] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.389495] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.390186] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4c9ddd-8874-4384-899c-3ac3b4c74b41 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.393597] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59ce82e4-0074-414a-a391-0523e00df2dc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.404830] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 812.404830] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f35c8f42-5f64-413f-9f02-8aea6bdfd3b8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.413750] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.413750] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 812.413750] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a69ee396-8172-49dc-a016-b0c110fcb4c4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.418030] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Waiting for the task: (returnval){ [ 812.418030] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52992009-1b16-c6b8-85c7-bd5114126d1c" [ 812.418030] env[61855]: _type = "Task" [ 812.418030] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.433235] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52992009-1b16-c6b8-85c7-bd5114126d1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.494826] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 812.495619] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 812.497143] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Deleting the datastore file [datastore2] fa375b5c-bf96-4f57-a97c-73ef45bbef75 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.497143] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85f0e8b5-e3d5-4892-9310-fe62064ca3b9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.504526] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Waiting for the task: (returnval){ [ 812.504526] env[61855]: value = "task-4302832" [ 812.504526] env[61855]: _type = "Task" [ 812.504526] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.512903] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Task: {'id': task-4302832, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.928183] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 812.928489] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Creating directory with path [datastore2] vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.928754] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69b245df-8a2c-4d33-b1af-84b38c5ec607 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.954956] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Created directory with path [datastore2] vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.956411] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Fetch image to [datastore2] vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 812.956411] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 812.956411] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c96b2bd-9245-4972-9717-5ba15c56ff52 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.968941] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d651fe4-ceb7-43eb-a088-022416444100 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.982109] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ad7f92-d97e-471e-a8cd-9215964b4d77 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.029216] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962850ef-cd4d-44d5-a8bf-49b0f4f74031 
{{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.036545] env[61855]: DEBUG oslo_vmware.api [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Task: {'id': task-4302832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195319} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.038717] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.038717] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 813.038717] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 813.038964] env[61855]: INFO nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Took 0.65 seconds to destroy the instance on the hypervisor. 
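The sequence above is the oslo.vmware task-polling pattern: a task such as CopyVirtualDisk_Task or DeleteDatastoreFile_Task is submitted, wait_for_task() polls its progress ("progress is 0%"), and on failure the stored task error is translated into a VimFaultException ("Fault InvalidArgument not matched", then "A specified parameter was not correct: fileType"). A minimal sketch of that poll/translate loop, assuming a hypothetical get_task_info callable and a simplified task_info dict shape rather than the real oslo.vmware internals:

    import time

    class VimFaultException(Exception):
        # Simplified stand-in: carries the vCenter fault names
        # (e.g. ['InvalidArgument']) alongside the message.
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task leaves the running state, the way the
        # _poll_task entries above do; raise on a stored task error.
        while True:
            info = get_task_info()  # one property-collector round-trip
            if info['state'] == 'running':
                print('progress is %d%%' % info['progress'])
            elif info['state'] == 'success':
                return info.get('result')
            else:  # 'error'
                raise VimFaultException(info['error_msg'], info['faults'])
            time.sleep(interval)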
[ 813.041165] env[61855]: DEBUG nova.compute.claims [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 813.043307] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.043307] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.047372] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-878b7966-105a-44aa-a8b0-5f4fb885f5fe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.069573] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 813.163687] env[61855]: DEBUG oslo_vmware.rw_handles [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 813.242965] env[61855]: DEBUG oslo_vmware.rw_handles [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 813.243137] env[61855]: DEBUG oslo_vmware.rw_handles [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 813.634697] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37caf511-b8b9-47e6-bb84-64d14c3bfbdf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.643559] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5c1613-a503-48b7-adfe-a07e66e8459e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.675629] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7a18a0-dafd-4a75-a4e4-b770515afe12 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.682712] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4699ffd0-f68c-4adc-a8d6-281fb35a817b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.698844] env[61855]: DEBUG nova.compute.provider_tree [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.711409] env[61855]: DEBUG nova.scheduler.client.report [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 813.735616] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.694s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.736392] env[61855]: ERROR nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 813.736392] env[61855]: Faults: ['InvalidArgument'] [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Traceback (most recent call last): [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 813.736392] env[61855]: ERROR 
nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] self.driver.spawn(context, instance, image_meta, [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] self._fetch_image_if_missing(context, vi) [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] image_cache(vi, tmp_image_ds_loc) [ 813.736392] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] vm_util.copy_virtual_disk( [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] session._wait_for_task(vmdk_copy_task) [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] return self.wait_for_task(task_ref) [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] return evt.wait() [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] result = hub.switch() [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] return self.greenlet.switch() [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 813.737191] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] self.f(*self.args, **self.kw) [ 813.737528] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 813.737528] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] raise exceptions.translate_fault(task_info.error) [ 813.737528] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 813.737528] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Faults: ['InvalidArgument'] [ 813.737528] env[61855]: ERROR nova.compute.manager [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] [ 813.737528] env[61855]: DEBUG nova.compute.utils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 813.739483] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Build of instance fa375b5c-bf96-4f57-a97c-73ef45bbef75 was re-scheduled: A specified parameter was not correct: fileType [ 813.739483] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 813.739924] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 813.740178] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 813.740318] env[61855]: DEBUG nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 813.740504] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.706342] env[61855]: DEBUG nova.network.neutron [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.727505] env[61855]: INFO nova.compute.manager [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Took 0.99 seconds to deallocate network for instance. [ 814.864316] env[61855]: INFO nova.scheduler.client.report [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Deleted allocations for instance fa375b5c-bf96-4f57-a97c-73ef45bbef75 [ 814.884218] env[61855]: DEBUG oslo_concurrency.lockutils [None req-119c578a-4bbf-4f14-8d29-d6835365580e tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 245.882s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.885575] env[61855]: DEBUG oslo_concurrency.lockutils [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 46.170s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.885788] env[61855]: DEBUG oslo_concurrency.lockutils [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Acquiring lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.885992] env[61855]: DEBUG oslo_concurrency.lockutils [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.886162] env[61855]: DEBUG oslo_concurrency.lockutils [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.888882] env[61855]: INFO nova.compute.manager [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Terminating instance [ 814.890666] env[61855]: DEBUG nova.compute.manager [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 814.890906] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 814.891423] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db5fb2fc-4064-4cd8-8119-bf30a3ea4fce {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.900403] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9532000-0676-4629-b701-03a1f2bebd3e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.911573] env[61855]: DEBUG nova.compute.manager [None req-5f5fe65f-3452-4b2c-a43d-27f5ec5af611 tempest-VolumesAssistedSnapshotsTest-842109476 tempest-VolumesAssistedSnapshotsTest-842109476-project-member] [instance: b979c065-ea7c-43bc-8701-fb77b4945ddf] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 814.935412] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fa375b5c-bf96-4f57-a97c-73ef45bbef75 could not be found. 
[ 814.935643] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 814.935826] env[61855]: INFO nova.compute.manager [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Took 0.04 seconds to destroy the instance on the hypervisor. [ 814.936097] env[61855]: DEBUG oslo.service.loopingcall [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.936342] env[61855]: DEBUG nova.compute.manager [-] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 814.936438] env[61855]: DEBUG nova.network.neutron [-] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.970532] env[61855]: DEBUG nova.compute.manager [None req-5f5fe65f-3452-4b2c-a43d-27f5ec5af611 tempest-VolumesAssistedSnapshotsTest-842109476 tempest-VolumesAssistedSnapshotsTest-842109476-project-member] [instance: b979c065-ea7c-43bc-8701-fb77b4945ddf] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 814.980736] env[61855]: DEBUG nova.network.neutron [-] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.995619] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f5fe65f-3452-4b2c-a43d-27f5ec5af611 tempest-VolumesAssistedSnapshotsTest-842109476 tempest-VolumesAssistedSnapshotsTest-842109476-project-member] Lock "b979c065-ea7c-43bc-8701-fb77b4945ddf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.081s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.997540] env[61855]: INFO nova.compute.manager [-] [instance: fa375b5c-bf96-4f57-a97c-73ef45bbef75] Took 0.06 seconds to deallocate network for instance. [ 815.008307] env[61855]: DEBUG nova.compute.manager [None req-c9fc344b-a76a-4405-9606-12c92092ccd0 tempest-AttachInterfacesUnderV243Test-1665714490 tempest-AttachInterfacesUnderV243Test-1665714490-project-member] [instance: f836f443-77b5-41ae-a1c2-1ee5f80885fa] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 815.046209] env[61855]: DEBUG nova.compute.manager [None req-c9fc344b-a76a-4405-9606-12c92092ccd0 tempest-AttachInterfacesUnderV243Test-1665714490 tempest-AttachInterfacesUnderV243Test-1665714490-project-member] [instance: f836f443-77b5-41ae-a1c2-1ee5f80885fa] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 815.099761] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c9fc344b-a76a-4405-9606-12c92092ccd0 tempest-AttachInterfacesUnderV243Test-1665714490 tempest-AttachInterfacesUnderV243Test-1665714490-project-member] Lock "f836f443-77b5-41ae-a1c2-1ee5f80885fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.338s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.112671] env[61855]: DEBUG nova.compute.manager [None req-c255994d-1c8d-4a3b-aba6-efa461909568 tempest-ServersWithSpecificFlavorTestJSON-615293268 tempest-ServersWithSpecificFlavorTestJSON-615293268-project-member] [instance: d61ac621-8140-4a40-8e00-acc041e3e0b4] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 815.175545] env[61855]: DEBUG nova.compute.manager [None req-c255994d-1c8d-4a3b-aba6-efa461909568 tempest-ServersWithSpecificFlavorTestJSON-615293268 tempest-ServersWithSpecificFlavorTestJSON-615293268-project-member] [instance: d61ac621-8140-4a40-8e00-acc041e3e0b4] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 815.208243] env[61855]: DEBUG oslo_concurrency.lockutils [None req-950fe9cf-338c-4591-a7eb-e0c066c3aab1 tempest-ImagesOneServerTestJSON-1175515355 tempest-ImagesOneServerTestJSON-1175515355-project-member] Lock "fa375b5c-bf96-4f57-a97c-73ef45bbef75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.323s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.216998] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c255994d-1c8d-4a3b-aba6-efa461909568 tempest-ServersWithSpecificFlavorTestJSON-615293268 tempest-ServersWithSpecificFlavorTestJSON-615293268-project-member] Lock "d61ac621-8140-4a40-8e00-acc041e3e0b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.867s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.227526] env[61855]: DEBUG nova.compute.manager [None req-c80253ba-89ea-4c2e-867e-3c7684985db6 tempest-ServerAddressesNegativeTestJSON-547679814 tempest-ServerAddressesNegativeTestJSON-547679814-project-member] [instance: 6e14a79e-f3fa-47f5-afff-8d159c6d8a88] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 815.263279] env[61855]: DEBUG nova.compute.manager [None req-c80253ba-89ea-4c2e-867e-3c7684985db6 tempest-ServerAddressesNegativeTestJSON-547679814 tempest-ServerAddressesNegativeTestJSON-547679814-project-member] [instance: 6e14a79e-f3fa-47f5-afff-8d159c6d8a88] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 815.287250] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c80253ba-89ea-4c2e-867e-3c7684985db6 tempest-ServerAddressesNegativeTestJSON-547679814 tempest-ServerAddressesNegativeTestJSON-547679814-project-member] Lock "6e14a79e-f3fa-47f5-afff-8d159c6d8a88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.575s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.302417] env[61855]: DEBUG nova.compute.manager [None req-bb2fc590-8b72-4587-8cfa-5d4361c789b9 tempest-ServersNegativeTestJSON-1790298136 tempest-ServersNegativeTestJSON-1790298136-project-member] [instance: e454739d-dd42-4cdb-90bb-2bf733688af7] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 815.332574] env[61855]: DEBUG nova.compute.manager [None req-bb2fc590-8b72-4587-8cfa-5d4361c789b9 tempest-ServersNegativeTestJSON-1790298136 tempest-ServersNegativeTestJSON-1790298136-project-member] [instance: e454739d-dd42-4cdb-90bb-2bf733688af7] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 815.363317] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bb2fc590-8b72-4587-8cfa-5d4361c789b9 tempest-ServersNegativeTestJSON-1790298136 tempest-ServersNegativeTestJSON-1790298136-project-member] Lock "e454739d-dd42-4cdb-90bb-2bf733688af7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.632s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.372336] env[61855]: DEBUG nova.compute.manager [None req-64796851-1525-4f2a-a2ba-317b214ca030 tempest-ServerExternalEventsTest-1224379416 tempest-ServerExternalEventsTest-1224379416-project-member] [instance: 83531c89-23dd-47b8-82dd-f8ec7b95dd4d] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 815.397538] env[61855]: DEBUG nova.compute.manager [None req-64796851-1525-4f2a-a2ba-317b214ca030 tempest-ServerExternalEventsTest-1224379416 tempest-ServerExternalEventsTest-1224379416-project-member] [instance: 83531c89-23dd-47b8-82dd-f8ec7b95dd4d] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 815.446968] env[61855]: DEBUG oslo_concurrency.lockutils [None req-64796851-1525-4f2a-a2ba-317b214ca030 tempest-ServerExternalEventsTest-1224379416 tempest-ServerExternalEventsTest-1224379416-project-member] Lock "83531c89-23dd-47b8-82dd-f8ec7b95dd4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.997s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.457919] env[61855]: DEBUG nova.compute.manager [None req-de874d95-09fc-456b-96a1-d61fc7389294 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: e3a48262-e571-4b12-8f5f-5c8f5f65e5eb] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 815.488668] env[61855]: DEBUG nova.compute.manager [None req-de874d95-09fc-456b-96a1-d61fc7389294 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: e3a48262-e571-4b12-8f5f-5c8f5f65e5eb] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 815.530965] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de874d95-09fc-456b-96a1-d61fc7389294 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "e3a48262-e571-4b12-8f5f-5c8f5f65e5eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.167s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.541593] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 815.606056] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.606056] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.606056] env[61855]: INFO nova.compute.claims [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.923756] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 815.924090] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 815.943653] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] There are 0 instances to clean {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 815.943886] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61855) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 815.944040] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61855) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 815.959687] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.121577] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d24c0b-a09a-4f24-8ee2-ee88ab611864 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.130974] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38243d2f-3d2d-4a71-865e-ca005943ead7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.171013] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fe3e37-87dd-4bbc-af80-4029a4c5d1b4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.180627] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa5fc91-4d2d-4268-a37e-a56334c85388 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.194696] env[61855]: DEBUG nova.compute.provider_tree [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.204055] env[61855]: DEBUG nova.scheduler.client.report [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 816.220080] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.616s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.220596] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 
tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 816.278063] env[61855]: DEBUG nova.compute.utils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 816.282662] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 816.282846] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 816.292048] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 816.387978] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 816.424804] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 816.424804] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 816.424804] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.425260] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 816.428160] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.428544] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 816.428652] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 816.428834] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 816.429043] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 816.429221] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 816.429522] env[61855]: DEBUG nova.virt.hardware [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.430574] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341ee4f0-b8db-4171-b289-c1deb44dd7d6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.438729] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54b21bd-bbc7-4edd-8ff4-a1711c8f0917 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.635913] env[61855]: DEBUG nova.policy [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03d9681ceb2a4fc3bb4568d21d9e05bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be0c282a8b4748b49377b714672e2cb7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 816.968662] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.382155] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Successfully created port: df14bc6f-60fa-4f13-a1ee-9addbfb494cd {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.492677] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Successfully updated port: df14bc6f-60fa-4f13-a1ee-9addbfb494cd {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.514332] 
env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "refresh_cache-0d6f9828-e93a-474d-af31-f0ee6cb2149f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.515163] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquired lock "refresh_cache-0d6f9828-e93a-474d-af31-f0ee6cb2149f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.515163] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 818.626249] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.715567] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.715878] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.919352] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.923556] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.936011] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.936244] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.936457] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.936636] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 818.937845] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fe9b89-36cd-4406-990b-e578fc53c828 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.950384] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a14b6a-b6b2-4442-8433-c16c739adb39 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.967832] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbd3c95-a628-403f-a97f-1b5a6c34172f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.976797] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87120e2-c5f7-4645-a168-a149ae120875 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.015151] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180669MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 819.015352] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.015791] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.107857] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6e0adc99-63ad-4cca-b300-d67dc2928324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.108045] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.108188] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e21722cc-672b-4f8a-9f78-e50ac83071a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.108317] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 311d492c-0093-4d64-a56f-80fce95b809a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.108454] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6c15201d-7373-4040-9256-84ff11fcfed2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.108576] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.108701] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.108820] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.109053] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.109177] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 819.126797] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0c391391-3357-41d5-995b-70accf3aa2a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.144557] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 21b0c6a6-05f7-4d92-9e2e-7ec332a4337a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.153774] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Updating instance_info_cache with network_info: [{"id": "df14bc6f-60fa-4f13-a1ee-9addbfb494cd", "address": "fa:16:3e:ab:d5:2c", "network": {"id": "b845f73b-3698-4875-b160-bfa31f58030e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-449135943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be0c282a8b4748b49377b714672e2cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "309d7cfa-b4da-4eec-9f4b-2e10d215fac7", "external-id": "nsx-vlan-transportzone-285", "segmentation_id": 285, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf14bc6f-60", "ovs_interfaceid": "df14bc6f-60fa-4f13-a1ee-9addbfb494cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.157285] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a2337e7e-3cc9-4427-99fc-b2990dca9cd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.173029] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Releasing lock "refresh_cache-0d6f9828-e93a-474d-af31-f0ee6cb2149f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.173029] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Instance network_info: |[{"id": "df14bc6f-60fa-4f13-a1ee-9addbfb494cd", "address": "fa:16:3e:ab:d5:2c", "network": {"id": "b845f73b-3698-4875-b160-bfa31f58030e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-449135943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be0c282a8b4748b49377b714672e2cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "309d7cfa-b4da-4eec-9f4b-2e10d215fac7", "external-id": "nsx-vlan-transportzone-285", "segmentation_id": 285, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf14bc6f-60", "ovs_interfaceid": "df14bc6f-60fa-4f13-a1ee-9addbfb494cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.173248] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:d5:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '309d7cfa-b4da-4eec-9f4b-2e10d215fac7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df14bc6f-60fa-4f13-a1ee-9addbfb494cd', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.179133] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Creating folder: Project (be0c282a8b4748b49377b714672e2cb7). Parent ref: group-v847048. 
{{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 819.179966] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-785c40ef-a20a-449c-aa07-debb9a0387d0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.182230] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b4b12e43-e240-4782-9041-8887334e6361 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.198406] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 5ae9308d-1e1f-49ab-aafc-022a936d2f15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.198406] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Created folder: Project (be0c282a8b4748b49377b714672e2cb7) in parent group-v847048. [ 819.198406] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Creating folder: Instances. Parent ref: group-v847093. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 819.198406] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f5f26ca-ec01-4a00-bbdb-2d8ec25dc1e9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.209097] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Created folder: Instances in parent group-v847093. [ 819.209364] env[61855]: DEBUG oslo.service.loopingcall [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.209544] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 819.209745] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7a635bc-04f4-4a0a-8291-607e5867ffbb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.231049] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8fb8bacf-aca4-4696-971e-559f85c002b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.239103] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.239103] env[61855]: value = "task-4302835" [ 819.239103] env[61855]: _type = "Task" [ 819.239103] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.243729] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3568e9ec-c87c-4831-bd20-d3cfab106e0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.251969] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302835, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.256149] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 00e8d25c-9c28-47ee-8fd7-8734df1a9a3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.275057] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b78cec38-feba-4723-b735-1d9afc5edadc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.289738] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.303954] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 850493b3-7c3d-4b01-a807-bc4cacb0cb5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.320017] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.343935] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 819.361984] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
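The "Skipping heal of allocation" entries above each carry a per-instance allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and the inventory reported earlier for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad gives the totals placement schedules against. A minimal sketch of the usable-capacity arithmetic, assuming the standard placement formula (total - reserved) * allocation_ratio:

```python
# Inventory values copied from the log for provider
# a9796bef-9c86-40e9-9cad-f1ac6217d1ad; the formula below is the
# usual placement capacity rule, not code from this deployment.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
}

def usable(inv):
    # Capacity the scheduler can allocate from this resource class.
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

for rc, inv in inventory.items():
    print(rc, usable(inv))
# VCPU 192, MEMORY_MB 196078, DISK_GB 210, so the instances above at
# 1 VCPU / 128 MB / 1 GB each consume only a small fraction.
```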
[ 819.363640] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 819.363640] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 819.614038] env[61855]: DEBUG nova.compute.manager [req-e518a63b-4b52-463b-a6f1-3bd28718e8eb req-5a6579a4-8a6a-4627-91f2-848bb9313eab service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Received event network-vif-plugged-df14bc6f-60fa-4f13-a1ee-9addbfb494cd {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 819.614328] env[61855]: DEBUG oslo_concurrency.lockutils [req-e518a63b-4b52-463b-a6f1-3bd28718e8eb req-5a6579a4-8a6a-4627-91f2-848bb9313eab service nova] Acquiring lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.614516] env[61855]: DEBUG oslo_concurrency.lockutils [req-e518a63b-4b52-463b-a6f1-3bd28718e8eb req-5a6579a4-8a6a-4627-91f2-848bb9313eab service nova] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.614920] env[61855]: DEBUG oslo_concurrency.lockutils [req-e518a63b-4b52-463b-a6f1-3bd28718e8eb req-5a6579a4-8a6a-4627-91f2-848bb9313eab service nova] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.614920] env[61855]: DEBUG nova.compute.manager [req-e518a63b-4b52-463b-a6f1-3bd28718e8eb req-5a6579a4-8a6a-4627-91f2-848bb9313eab service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] No waiting events found dispatching network-vif-plugged-df14bc6f-60fa-4f13-a1ee-9addbfb494cd {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 819.615031] env[61855]: WARNING nova.compute.manager [req-e518a63b-4b52-463b-a6f1-3bd28718e8eb req-5a6579a4-8a6a-4627-91f2-848bb9313eab service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Received unexpected event network-vif-plugged-df14bc6f-60fa-4f13-a1ee-9addbfb494cd for instance with vm_state building and task_state spawning. [ 819.749925] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302835, 'name': CreateVM_Task, 'duration_secs': 0.375563} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
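The CreateVM_Task completion above, and the SearchDatastore_Task "progress is 0%" polls that follow, come from oslo.vmware's task loop. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and moref value below are placeholders, not values from this log:

```python
from oslo_vmware import api, vim_util

# Session setup mirrors VMwareAPISession._create_session seen earlier;
# task_poll_interval controls how often "_poll_task ... progress" fires.
session = api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10,
    task_poll_interval=0.5,
)

# invoke_api() issues the SOAP call and returns a task moref;
# wait_for_task() then polls TaskInfo, logging progress at DEBUG,
# until the task succeeds or raises on error.
folder = vim_util.get_moref('group-v847093', 'Folder')  # hypothetical moref
task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
                          config=None,  # a populated VirtualMachineConfigSpec
                                        # is required; None is a placeholder
                                        # that vCenter would reject
                          pool=None)
task_info = session.wait_for_task(task)
```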
[ 819.750842] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 819.751695] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.751695] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.752036] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 819.752277] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-673c5bb8-b4b1-4935-a633-3985632e3b4f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.759878] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Waiting for the task: (returnval){ [ 819.759878] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]521acfdc-1ce2-f594-a4e8-9d9667985a02" [ 819.759878] env[61855]: _type = "Task" [ 819.759878] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.770708] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]521acfdc-1ce2-f594-a4e8-9d9667985a02, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.878406] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab2c361-b2c2-4ae4-bf11-8c0e763c84fc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.887243] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5006f47-1c01-46d4-9f44-30a94fb343b9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.925963] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6bc11e-f9ed-4c9a-a5a4-4ea72c734cde {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.934916] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85d2aed-4d47-41f7-b0bd-2f99af507733 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.952831] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.970748] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.990356] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 819.990457] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.975s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.273086] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.273391] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc 
{{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.273619] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.295850] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.991781] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.991781] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 820.991781] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 821.030693] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.031818] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.032285] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.032285] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.032471] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Skipping network cache update for instance because it is Building.
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.032978] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.032978] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.032978] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.032978] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.033111] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 821.033486] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 821.033892] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.034249] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.034841] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.034841] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.034960] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
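The "Running periodic task ComputeManager._*" lines above are all emitted by oslo.service's periodic-task runner. A minimal sketch of that machinery, with an illustrative manager and task name rather than Nova's own:

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=10)  # run at most every 10 seconds
    def _poll_something(self, context):
        # run_periodic_tasks() logs "Running periodic task ..." at DEBUG
        # (periodic_task.py:210) just before invoking each task whose
        # spacing has elapsed.
        pass

mgr = Manager()
mgr.run_periodic_tasks(context=None)  # normally driven by a looping call
```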
[ 821.924509] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.168409] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5fa6118b-e4c2-415e-9fcc-57fe7e76f571 tempest-ServerRescueTestJSON-1125875554 tempest-ServerRescueTestJSON-1125875554-project-member] Acquiring lock "b693f06d-14c8-49f7-8870-8b440908de74" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.169310] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5fa6118b-e4c2-415e-9fcc-57fe7e76f571 tempest-ServerRescueTestJSON-1125875554 tempest-ServerRescueTestJSON-1125875554-project-member] Lock "b693f06d-14c8-49f7-8870-8b440908de74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.896400] env[61855]: DEBUG nova.compute.manager [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Received event network-changed-df14bc6f-60fa-4f13-a1ee-9addbfb494cd {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 822.896589] env[61855]: DEBUG nova.compute.manager [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Refreshing instance network info cache due to event network-changed-df14bc6f-60fa-4f13-a1ee-9addbfb494cd. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 822.896798] env[61855]: DEBUG oslo_concurrency.lockutils [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] Acquiring lock "refresh_cache-0d6f9828-e93a-474d-af31-f0ee6cb2149f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.896939] env[61855]: DEBUG oslo_concurrency.lockutils [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] Acquired lock "refresh_cache-0d6f9828-e93a-474d-af31-f0ee6cb2149f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.897110] env[61855]: DEBUG nova.network.neutron [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Refreshing network info cache for port df14bc6f-60fa-4f13-a1ee-9addbfb494cd {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 823.486164] env[61855]: DEBUG nova.network.neutron [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Updated VIF entry in instance network info cache for port df14bc6f-60fa-4f13-a1ee-9addbfb494cd.
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 823.486793] env[61855]: DEBUG nova.network.neutron [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Updating instance_info_cache with network_info: [{"id": "df14bc6f-60fa-4f13-a1ee-9addbfb494cd", "address": "fa:16:3e:ab:d5:2c", "network": {"id": "b845f73b-3698-4875-b160-bfa31f58030e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-449135943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be0c282a8b4748b49377b714672e2cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "309d7cfa-b4da-4eec-9f4b-2e10d215fac7", "external-id": "nsx-vlan-transportzone-285", "segmentation_id": 285, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf14bc6f-60", "ovs_interfaceid": "df14bc6f-60fa-4f13-a1ee-9addbfb494cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.504244] env[61855]: DEBUG oslo_concurrency.lockutils [req-099d92e5-d61e-475a-811c-b565d398eb89 req-99aacf59-18a9-4aca-981d-796c5ebc16c9 service nova] Releasing lock "refresh_cache-0d6f9828-e93a-474d-af31-f0ee6cb2149f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.555095] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fd1649f1-f795-4e80-8aa2-5964048fb0f7 tempest-TenantUsagesTestJSON-110961510 tempest-TenantUsagesTestJSON-110961510-project-member] Acquiring lock "f3215717-1220-47a1-be3d-d1d5efcac656" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.555412] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fd1649f1-f795-4e80-8aa2-5964048fb0f7 tempest-TenantUsagesTestJSON-110961510 tempest-TenantUsagesTestJSON-110961510-project-member] Lock "f3215717-1220-47a1-be3d-d1d5efcac656" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.507541] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c3071069-b926-425d-a765-226b6a0d43b0 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] Acquiring lock "89517dc6-96e8-4e89-aa1c-cdd43e340551" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.507894] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c3071069-b926-425d-a765-226b6a0d43b0 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] Lock "89517dc6-96e8-4e89-aa1c-cdd43e340551" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
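The paired Acquiring/acquired (and earlier "released") lock entries throughout this trace, with their waited/held timings, come from oslo.concurrency's lockutils. A minimal sketch of the two usage forms that produce exactly these DEBUG lines; the lock names below are copied from the log for illustration only:

```python
from oslo_concurrency import lockutils

# Decorator form: the decorated function's qualified name, e.g.
# "...build_and_run_instance.<locals>._locked_do_build_and_run_instance",
# is what appears after 'by' in the Acquiring/acquired lines.
@lockutils.synchronized('1f2b9ec1-5449-45a9-9691-857b15aaa9ff')
def _locked_do_build_and_run_instance():
    pass

def update_resources():
    # Context-manager form, as used around the "compute_resources" and
    # "refresh_cache-<uuid>" critical sections; lockutils logs the
    # waited time on entry and the held time on release.
    with lockutils.lock('compute_resources'):
        pass
```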
[ 832.531964] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2f116cda-594f-4b47-9f5f-b932f768a2a8 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Acquiring lock "13e65b84-f5e2-4352-94ec-b37d0803e279" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.532618] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2f116cda-594f-4b47-9f5f-b932f768a2a8 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Lock "13e65b84-f5e2-4352-94ec-b37d0803e279" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.576794] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a1df8665-6c7b-4843-96dc-273cf3a4a48a tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Acquiring lock "21d8fcb1-84bc-4d93-8a17-24230e0ee8cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.577140] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a1df8665-6c7b-4843-96dc-273cf3a4a48a tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Lock "21d8fcb1-84bc-4d93-8a17-24230e0ee8cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.275626] env[61855]: DEBUG oslo_concurrency.lockutils [None req-29d03a1f-0ec9-4b2f-bce2-8bc90979bcf4 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Acquiring lock "faaec6a7-01f9-4f9b-992c-8c86a007b6aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.276322] env[61855]: DEBUG oslo_concurrency.lockutils [None req-29d03a1f-0ec9-4b2f-bce2-8bc90979bcf4 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Lock "faaec6a7-01f9-4f9b-992c-8c86a007b6aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.679087] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e7d6ed71-b85e-42de-9556-9e476166d75e tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] Acquiring lock "8767138c-2cc2-49ae-9f35-c433dfedcb45" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance"
{{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.679395] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e7d6ed71-b85e-42de-9556-9e476166d75e tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] Lock "8767138c-2cc2-49ae-9f35-c433dfedcb45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.023517] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8f17b7d-2f2b-4f0d-b5ae-cc513e699635 tempest-ServersListShow296Test-1045640085 tempest-ServersListShow296Test-1045640085-project-member] Acquiring lock "6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.023797] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8f17b7d-2f2b-4f0d-b5ae-cc513e699635 tempest-ServersListShow296Test-1045640085 tempest-ServersListShow296Test-1045640085-project-member] Lock "6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.877086] env[61855]: DEBUG oslo_concurrency.lockutils [None req-78cd50a1-c927-4ee2-863a-667efd5b48a8 tempest-ServersAaction247Test-540627758 tempest-ServersAaction247Test-540627758-project-member] Acquiring lock "dbddb131-724e-44f0-ad59-9c7c1f6e4889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.877471] env[61855]: DEBUG oslo_concurrency.lockutils [None req-78cd50a1-c927-4ee2-863a-667efd5b48a8 tempest-ServersAaction247Test-540627758 tempest-ServersAaction247Test-540627758-project-member] Lock "dbddb131-724e-44f0-ad59-9c7c1f6e4889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.556447] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2ab3115e-c076-40a7-916d-3014d6898a89 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Acquiring lock "2c085a5c-d229-42e2-9155-ad5647110e07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.556745] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2ab3115e-c076-40a7-916d-3014d6898a89 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "2c085a5c-d229-42e2-9155-ad5647110e07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.638535] env[61855]: DEBUG oslo_concurrency.lockutils [None req-64f41831-9cd0-4835-9ca6-c4370c67b52f tempest-ServersV294TestFqdnHostnames-446546208 
tempest-ServersV294TestFqdnHostnames-446546208-project-member] Acquiring lock "dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.639013] env[61855]: DEBUG oslo_concurrency.lockutils [None req-64f41831-9cd0-4835-9ca6-c4370c67b52f tempest-ServersV294TestFqdnHostnames-446546208 tempest-ServersV294TestFqdnHostnames-446546208-project-member] Lock "dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.995086] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d397419e-dda3-48f6-823b-404640b7b165 tempest-ServersTestFqdnHostnames-425384496 tempest-ServersTestFqdnHostnames-425384496-project-member] Acquiring lock "95bdfda9-d381-4a0f-bfde-57b423ff19c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.995412] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d397419e-dda3-48f6-823b-404640b7b165 tempest-ServersTestFqdnHostnames-425384496 tempest-ServersTestFqdnHostnames-425384496-project-member] Lock "95bdfda9-d381-4a0f-bfde-57b423ff19c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.210763] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07888221-052d-4e40-b07d-de7f522ab20c tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] Acquiring lock "ddb1501a-9afc-4916-ab4e-97b851b0f931" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.211132] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07888221-052d-4e40-b07d-de7f522ab20c tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] Lock "ddb1501a-9afc-4916-ab4e-97b851b0f931" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.637703] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de4f7f2b-aa3c-47e6-8367-193f7f1edac9 tempest-ServerActionsV293TestJSON-717373711 tempest-ServerActionsV293TestJSON-717373711-project-member] Acquiring lock "a0d8f45a-5b83-425a-b8ac-1d507a441bba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.637703] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de4f7f2b-aa3c-47e6-8367-193f7f1edac9 tempest-ServerActionsV293TestJSON-717373711 tempest-ServerActionsV293TestJSON-717373711-project-member] Lock "a0d8f45a-5b83-425a-b8ac-1d507a441bba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.645245] env[61855]: WARNING oslo_vmware.rw_handles [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 862.645245] env[61855]: ERROR oslo_vmware.rw_handles [ 862.645864] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 862.647327] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 862.647632] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Copying Virtual Disk [datastore2] vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/324f15a1-0875-4735-8850-a20b22c977b0/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 862.647872] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1724b9cf-c012-4f7c-8940-48c31324e85b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.657923] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Waiting for the task: 
(returnval){ [ 862.657923] env[61855]: value = "task-4302846" [ 862.657923] env[61855]: _type = "Task" [ 862.657923] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.666381] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Task: {'id': task-4302846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.168892] env[61855]: DEBUG oslo_vmware.exceptions [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 863.169205] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.169768] env[61855]: ERROR nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 863.169768] env[61855]: Faults: ['InvalidArgument'] [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Traceback (most recent call last): [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] yield resources [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self.driver.spawn(context, instance, image_meta, [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self._vmops.spawn(context, instance, image_meta, injected_files, [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self._fetch_image_if_missing(context, vi) [ 863.169768] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 
6e0adc99-63ad-4cca-b300-d67dc2928324] image_cache(vi, tmp_image_ds_loc) [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] vm_util.copy_virtual_disk( [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] session._wait_for_task(vmdk_copy_task) [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] return self.wait_for_task(task_ref) [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] return evt.wait() [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] result = hub.switch() [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 863.170229] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] return self.greenlet.switch() [ 863.170648] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 863.170648] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self.f(*self.args, **self.kw) [ 863.170648] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 863.170648] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] raise exceptions.translate_fault(task_info.error) [ 863.170648] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 863.170648] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Faults: ['InvalidArgument'] [ 863.170648] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] [ 863.170648] env[61855]: INFO nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Terminating instance [ 863.171739] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b 
tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.172039] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.172608] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 863.172796] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 863.173048] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf8f19a4-23c0-4cb6-a656-c8feb65a6947 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.175389] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c71f829-d480-4283-b8fa-622b6893f04b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.182830] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 863.183073] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7320fa4b-bf43-48ef-8687-a41abd4b17b5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.185296] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.185464] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 863.186442] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0131d41b-f490-45c5-bd49-185941e7ab4a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.191372] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){ [ 863.191372] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52d204e3-bc2f-e17a-45e6-1cc3e3c8ddec" [ 863.191372] env[61855]: _type = "Task" [ 863.191372] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.199284] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52d204e3-bc2f-e17a-45e6-1cc3e3c8ddec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.262909] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 863.263150] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 863.263346] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Deleting the datastore file [datastore2] 6e0adc99-63ad-4cca-b300-d67dc2928324 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 863.263729] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d557bc4-439c-47e5-b3b5-c81ba29fe2cc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.272518] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Waiting for the task: (returnval){ [ 863.272518] env[61855]: value = "task-4302848" [ 863.272518] env[61855]: _type = "Task" [ 863.272518] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.281595] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Task: {'id': task-4302848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.702223] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 863.702534] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating directory with path [datastore2] vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.702840] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e1b832e-f7c2-4a71-be05-c4043c2b4215 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.716149] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Created directory with path [datastore2] vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.716360] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Fetch image to [datastore2] vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 863.716533] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 863.717359] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597fd532-1a16-409a-8881-3a90ea48bc76 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.724846] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e831f33-77e7-4400-811a-5953082353a1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.734425] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50838088-a60c-427a-816d-e779cec3d3ad {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.768835] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b7e34f-8608-4cd7-bea1-c4bc8cfb3720 {{(pid=61855) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.777879] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3172dc52-546e-4db5-919e-8ea25f0690a6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.782231] env[61855]: DEBUG oslo_vmware.api [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Task: {'id': task-4302848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065611} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.782833] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.783039] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 863.783217] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 863.784030] env[61855]: INFO nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 863.786549] env[61855]: DEBUG nova.compute.claims [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 863.786725] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.786939] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.804293] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 863.854863] env[61855]: DEBUG nova.scheduler.client.report [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Refreshing inventories for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 863.858118] env[61855]: DEBUG oslo_vmware.rw_handles [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 863.913416] env[61855]: DEBUG nova.scheduler.client.report [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Updating ProviderTree inventory for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 863.913550] env[61855]: DEBUG nova.compute.provider_tree [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.918148] env[61855]: DEBUG oslo_vmware.rw_handles [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 863.918297] env[61855]: DEBUG oslo_vmware.rw_handles [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 863.926993] env[61855]: DEBUG nova.scheduler.client.report [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Refreshing aggregate associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, aggregates: None {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 863.948220] env[61855]: DEBUG nova.scheduler.client.report [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Refreshing trait associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 864.289423] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3b656a-7cca-48d6-9653-a02ae4ce1cee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.297611] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f17d24e-16cb-46cb-a068-72f9d3f6f69c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.328083] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb907eb-6bb2-42dc-9c8b-eaeecc5a1d1f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.335905] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaee5525-d4a6-4267-b5a9-9fdf0955a9fe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.351288] env[61855]: DEBUG nova.compute.provider_tree [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.360553] env[61855]: DEBUG nova.scheduler.client.report [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.374651] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.588s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.375187] env[61855]: ERROR nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 864.375187] env[61855]: Faults: ['InvalidArgument'] [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Traceback (most recent call last): [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self.driver.spawn(context, instance, image_meta, [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self._vmops.spawn(context, instance, image_meta, injected_files, [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self._fetch_image_if_missing(context, vi) [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] image_cache(vi, tmp_image_ds_loc) [ 864.375187] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] vm_util.copy_virtual_disk( [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] session._wait_for_task(vmdk_copy_task) [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] return self.wait_for_task(task_ref) [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] return evt.wait() [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", 
line 124, in wait [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] result = hub.switch() [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] return self.greenlet.switch() [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 864.375586] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] self.f(*self.args, **self.kw) [ 864.376019] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 864.376019] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] raise exceptions.translate_fault(task_info.error) [ 864.376019] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 864.376019] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Faults: ['InvalidArgument'] [ 864.376019] env[61855]: ERROR nova.compute.manager [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] [ 864.376019] env[61855]: DEBUG nova.compute.utils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 864.377608] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Build of instance 6e0adc99-63ad-4cca-b300-d67dc2928324 was re-scheduled: A specified parameter was not correct: fileType [ 864.377608] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 864.377979] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 864.378172] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 864.378345] env[61855]: DEBUG nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 864.378507] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.772475] env[61855]: DEBUG nova.network.neutron [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.786380] env[61855]: INFO nova.compute.manager [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Took 0.41 seconds to deallocate network for instance. [ 864.889460] env[61855]: INFO nova.scheduler.client.report [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Deleted allocations for instance 6e0adc99-63ad-4cca-b300-d67dc2928324 [ 864.911465] env[61855]: DEBUG oslo_concurrency.lockutils [None req-87c3d756-3c3b-40b7-afd2-bfef276fb980 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 294.967s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.913186] env[61855]: DEBUG oslo_concurrency.lockutils [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 95.780s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.913373] env[61855]: DEBUG oslo_concurrency.lockutils [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Acquiring lock "6e0adc99-63ad-4cca-b300-d67dc2928324-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.914709] env[61855]: DEBUG oslo_concurrency.lockutils [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.914709] env[61855]: DEBUG oslo_concurrency.lockutils [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.915788] env[61855]: INFO nova.compute.manager [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Terminating instance [ 864.917554] env[61855]: DEBUG nova.compute.manager [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 864.917711] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 864.918229] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-340fc82d-fa3b-4566-8704-dd9b8714f55b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.930326] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c802f2b-b354-4cd2-bfe0-59600a8d0a51 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.942678] env[61855]: DEBUG nova.compute.manager [None req-deef5b02-bbeb-4ba1-a138-24d91920eedd tempest-ServersTestManualDisk-1103417019 tempest-ServersTestManualDisk-1103417019-project-member] [instance: 0c391391-3357-41d5-995b-70accf3aa2a9] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 864.966503] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6e0adc99-63ad-4cca-b300-d67dc2928324 could not be found. [ 864.966769] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 864.966978] env[61855]: INFO nova.compute.manager [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 864.967507] env[61855]: DEBUG oslo.service.loopingcall [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.967507] env[61855]: DEBUG nova.compute.manager [-] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 864.967616] env[61855]: DEBUG nova.network.neutron [-] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.971172] env[61855]: DEBUG nova.compute.manager [None req-deef5b02-bbeb-4ba1-a138-24d91920eedd tempest-ServersTestManualDisk-1103417019 tempest-ServersTestManualDisk-1103417019-project-member] [instance: 0c391391-3357-41d5-995b-70accf3aa2a9] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 864.999041] env[61855]: DEBUG nova.network.neutron [-] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.014358] env[61855]: INFO nova.compute.manager [-] [instance: 6e0adc99-63ad-4cca-b300-d67dc2928324] Took 0.05 seconds to deallocate network for instance. [ 865.020877] env[61855]: DEBUG oslo_concurrency.lockutils [None req-deef5b02-bbeb-4ba1-a138-24d91920eedd tempest-ServersTestManualDisk-1103417019 tempest-ServersTestManualDisk-1103417019-project-member] Lock "0c391391-3357-41d5-995b-70accf3aa2a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 236.771s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.030149] env[61855]: DEBUG nova.compute.manager [None req-2d6e0e1f-966c-4c4f-9973-29b0d4b26c43 tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] [instance: 21b0c6a6-05f7-4d92-9e2e-7ec332a4337a] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.067361] env[61855]: DEBUG nova.compute.manager [None req-2d6e0e1f-966c-4c4f-9973-29b0d4b26c43 tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] [instance: 21b0c6a6-05f7-4d92-9e2e-7ec332a4337a] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.093189] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2d6e0e1f-966c-4c4f-9973-29b0d4b26c43 tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] Lock "21b0c6a6-05f7-4d92-9e2e-7ec332a4337a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 232.882s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.106751] env[61855]: DEBUG nova.compute.manager [None req-25bbf440-5ad5-40b3-b686-303c3c822b85 tempest-ServersAdminNegativeTestJSON-91051523 tempest-ServersAdminNegativeTestJSON-91051523-project-member] [instance: a2337e7e-3cc9-4427-99fc-b2990dca9cd0] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.144015] env[61855]: DEBUG nova.compute.manager [None req-25bbf440-5ad5-40b3-b686-303c3c822b85 tempest-ServersAdminNegativeTestJSON-91051523 tempest-ServersAdminNegativeTestJSON-91051523-project-member] [instance: a2337e7e-3cc9-4427-99fc-b2990dca9cd0] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.167244] env[61855]: DEBUG oslo_concurrency.lockutils [None req-126f46a1-74d5-4713-b86b-ce8935cab271 tempest-ServerAddressesTestJSON-223279044 tempest-ServerAddressesTestJSON-223279044-project-member] Lock "6e0adc99-63ad-4cca-b300-d67dc2928324" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.254s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.174967] env[61855]: DEBUG oslo_concurrency.lockutils [None req-25bbf440-5ad5-40b3-b686-303c3c822b85 tempest-ServersAdminNegativeTestJSON-91051523 tempest-ServersAdminNegativeTestJSON-91051523-project-member] Lock "a2337e7e-3cc9-4427-99fc-b2990dca9cd0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 231.757s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.184156] env[61855]: DEBUG nova.compute.manager [None req-78d4bb6c-9018-4520-9cb0-8d9c23f98395 tempest-ImagesOneServerNegativeTestJSON-489246136 tempest-ImagesOneServerNegativeTestJSON-489246136-project-member] [instance: b4b12e43-e240-4782-9041-8887334e6361] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.207067] env[61855]: DEBUG nova.compute.manager [None req-78d4bb6c-9018-4520-9cb0-8d9c23f98395 tempest-ImagesOneServerNegativeTestJSON-489246136 tempest-ImagesOneServerNegativeTestJSON-489246136-project-member] [instance: b4b12e43-e240-4782-9041-8887334e6361] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.227525] env[61855]: DEBUG oslo_concurrency.lockutils [None req-78d4bb6c-9018-4520-9cb0-8d9c23f98395 tempest-ImagesOneServerNegativeTestJSON-489246136 tempest-ImagesOneServerNegativeTestJSON-489246136-project-member] Lock "b4b12e43-e240-4782-9041-8887334e6361" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.237935] env[61855]: DEBUG nova.compute.manager [None req-8b7938dc-d9c2-4da6-be61-cb9023404eb9 tempest-ServerActionsTestOtherB-1742061393 tempest-ServerActionsTestOtherB-1742061393-project-member] [instance: 5ae9308d-1e1f-49ab-aafc-022a936d2f15] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.267226] env[61855]: DEBUG nova.compute.manager [None req-8b7938dc-d9c2-4da6-be61-cb9023404eb9 tempest-ServerActionsTestOtherB-1742061393 tempest-ServerActionsTestOtherB-1742061393-project-member] [instance: 5ae9308d-1e1f-49ab-aafc-022a936d2f15] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.297123] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8b7938dc-d9c2-4da6-be61-cb9023404eb9 tempest-ServerActionsTestOtherB-1742061393 tempest-ServerActionsTestOtherB-1742061393-project-member] Lock "5ae9308d-1e1f-49ab-aafc-022a936d2f15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.940s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.307392] env[61855]: DEBUG nova.compute.manager [None req-fd456e19-9a64-4a30-babe-4f48751c4bb9 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: 8fb8bacf-aca4-4696-971e-559f85c002b3] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.331957] env[61855]: DEBUG nova.compute.manager [None req-fd456e19-9a64-4a30-babe-4f48751c4bb9 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: 8fb8bacf-aca4-4696-971e-559f85c002b3] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.352849] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fd456e19-9a64-4a30-babe-4f48751c4bb9 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "8fb8bacf-aca4-4696-971e-559f85c002b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 217.768s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.363545] env[61855]: DEBUG nova.compute.manager [None req-9ed2e9e2-6aa1-4e63-8419-aa21b0a36b27 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] [instance: 3568e9ec-c87c-4831-bd20-d3cfab106e0d] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.386245] env[61855]: DEBUG nova.compute.manager [None req-9ed2e9e2-6aa1-4e63-8419-aa21b0a36b27 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] [instance: 3568e9ec-c87c-4831-bd20-d3cfab106e0d] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.407745] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9ed2e9e2-6aa1-4e63-8419-aa21b0a36b27 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] Lock "3568e9ec-c87c-4831-bd20-d3cfab106e0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 215.302s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.427986] env[61855]: DEBUG nova.compute.manager [None req-229300d9-c4e9-4fe6-8c63-30ce2b849c81 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] [instance: 00e8d25c-9c28-47ee-8fd7-8734df1a9a3a] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.453151] env[61855]: DEBUG nova.compute.manager [None req-229300d9-c4e9-4fe6-8c63-30ce2b849c81 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] [instance: 00e8d25c-9c28-47ee-8fd7-8734df1a9a3a] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.474831] env[61855]: DEBUG oslo_concurrency.lockutils [None req-229300d9-c4e9-4fe6-8c63-30ce2b849c81 tempest-ListImageFiltersTestJSON-1785705134 tempest-ListImageFiltersTestJSON-1785705134-project-member] Lock "00e8d25c-9c28-47ee-8fd7-8734df1a9a3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 215.105s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.484066] env[61855]: DEBUG nova.compute.manager [None req-e16987a3-8a9b-453e-a00e-f36127d1a7dd tempest-ServersTestBootFromVolume-1706340137 tempest-ServersTestBootFromVolume-1706340137-project-member] [instance: b78cec38-feba-4723-b735-1d9afc5edadc] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.513516] env[61855]: DEBUG nova.compute.manager [None req-e16987a3-8a9b-453e-a00e-f36127d1a7dd tempest-ServersTestBootFromVolume-1706340137 tempest-ServersTestBootFromVolume-1706340137-project-member] [instance: b78cec38-feba-4723-b735-1d9afc5edadc] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 865.534687] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e16987a3-8a9b-453e-a00e-f36127d1a7dd tempest-ServersTestBootFromVolume-1706340137 tempest-ServersTestBootFromVolume-1706340137-project-member] Lock "b78cec38-feba-4723-b735-1d9afc5edadc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.126s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.544383] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 865.593047] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.593193] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.594656] env[61855]: INFO nova.compute.claims [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.978055] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68e4d88-2623-40a7-a50f-6644b84fd472 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.986550] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c471aa6b-31fc-46e1-8f04-f5f19aecc91d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.016913] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60dcf23c-4dcf-4972-9692-d5e86f430b67 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.025022] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746ac55d-c364-4f08-9ddf-23094e7939db {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.038800] env[61855]: DEBUG nova.compute.provider_tree [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.047524] env[61855]: 
DEBUG nova.scheduler.client.report [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.063349] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.470s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.064181] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 866.101695] env[61855]: DEBUG nova.compute.utils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 866.103361] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 866.103470] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 866.113954] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 866.183757] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 866.188786] env[61855]: DEBUG nova.policy [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee2bdbf7c18848e89884e2adaf8233d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55e6da686f974fcb97990523a31bf149', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 866.214306] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.214597] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.214763] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.214949] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.215109] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.215262] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.215470] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c 
tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 866.215633] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.215807] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.215977] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.216209] env[61855]: DEBUG nova.virt.hardware [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.217090] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8807f7d1-2993-45dc-bd6d-c2e6f8436854 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.226317] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b533ae6-de65-4dfc-abfe-b25c37172e1e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.654275] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Successfully created port: 7e8b7a2c-3d18-4b78-8224-3185fa182064 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.784796] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Successfully updated port: 7e8b7a2c-3d18-4b78-8224-3185fa182064 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.803455] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "refresh_cache-29ae12b6-adc9-4a25-8a89-9a88470b3818" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.803455] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquired lock 
"refresh_cache-29ae12b6-adc9-4a25-8a89-9a88470b3818" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.803455] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 867.886772] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 867.974254] env[61855]: DEBUG nova.compute.manager [req-22078d4e-589d-442a-b163-44306543f0b3 req-f3f1601a-d8ce-403f-b942-7920c0bdfb10 service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Received event network-vif-plugged-7e8b7a2c-3d18-4b78-8224-3185fa182064 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 867.974582] env[61855]: DEBUG oslo_concurrency.lockutils [req-22078d4e-589d-442a-b163-44306543f0b3 req-f3f1601a-d8ce-403f-b942-7920c0bdfb10 service nova] Acquiring lock "29ae12b6-adc9-4a25-8a89-9a88470b3818-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.974844] env[61855]: DEBUG oslo_concurrency.lockutils [req-22078d4e-589d-442a-b163-44306543f0b3 req-f3f1601a-d8ce-403f-b942-7920c0bdfb10 service nova] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.975086] env[61855]: DEBUG oslo_concurrency.lockutils [req-22078d4e-589d-442a-b163-44306543f0b3 req-f3f1601a-d8ce-403f-b942-7920c0bdfb10 service nova] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.976627] env[61855]: DEBUG nova.compute.manager [req-22078d4e-589d-442a-b163-44306543f0b3 req-f3f1601a-d8ce-403f-b942-7920c0bdfb10 service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] No waiting events found dispatching network-vif-plugged-7e8b7a2c-3d18-4b78-8224-3185fa182064 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 867.977665] env[61855]: WARNING nova.compute.manager [req-22078d4e-589d-442a-b163-44306543f0b3 req-f3f1601a-d8ce-403f-b942-7920c0bdfb10 service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Received unexpected event network-vif-plugged-7e8b7a2c-3d18-4b78-8224-3185fa182064 for instance with vm_state building and task_state spawning. 
[ 868.131960] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Updating instance_info_cache with network_info: [{"id": "7e8b7a2c-3d18-4b78-8224-3185fa182064", "address": "fa:16:3e:53:cf:d8", "network": {"id": "5928ac77-b700-404c-9e6f-7899b23ce28a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1436402704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55e6da686f974fcb97990523a31bf149", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e8b7a2c-3d", "ovs_interfaceid": "7e8b7a2c-3d18-4b78-8224-3185fa182064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.156625] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Releasing lock "refresh_cache-29ae12b6-adc9-4a25-8a89-9a88470b3818" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.156952] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Instance network_info: |[{"id": "7e8b7a2c-3d18-4b78-8224-3185fa182064", "address": "fa:16:3e:53:cf:d8", "network": {"id": "5928ac77-b700-404c-9e6f-7899b23ce28a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1436402704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55e6da686f974fcb97990523a31bf149", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e8b7a2c-3d", "ovs_interfaceid": "7e8b7a2c-3d18-4b78-8224-3185fa182064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 868.157444] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:cf:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a8f5363-be3a-4f92-9ccf-33bb0c8113b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e8b7a2c-3d18-4b78-8224-3185fa182064', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 868.165165] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Creating folder: Project (55e6da686f974fcb97990523a31bf149). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 868.165877] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84e64a52-7c25-45c1-bca6-933bb7be3ace {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.177576] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Created folder: Project (55e6da686f974fcb97990523a31bf149) in parent group-v847048. [ 868.177772] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Creating folder: Instances. Parent ref: group-v847100. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 868.177997] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-380c9766-4f80-49bc-a099-9c44e2279eec {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.187853] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Created folder: Instances in parent group-v847100. [ 868.188113] env[61855]: DEBUG oslo.service.loopingcall [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.188297] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 868.188497] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7865cab-2bee-4d7c-8458-69b2e5ae4f7d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.210341] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 868.210341] env[61855]: value = "task-4302851" [ 868.210341] env[61855]: _type = "Task" [ 868.210341] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.218227] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302851, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.720681] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302851, 'name': CreateVM_Task, 'duration_secs': 0.285834} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.720838] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 868.721577] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.721733] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.722058] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.722851] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bd90076-d2d2-4c3a-9fa8-742f00289017 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.727517] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Waiting for the task: (returnval){ [ 868.727517] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b9a7a1-6b72-68a8-11a0-7cb09116657a" [ 868.727517] env[61855]: _type = "Task" [ 868.727517] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.735599] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b9a7a1-6b72-68a8-11a0-7cb09116657a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.239798] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.240936] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.240936] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.021666] env[61855]: DEBUG nova.compute.manager [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Received event network-changed-7e8b7a2c-3d18-4b78-8224-3185fa182064 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 870.021889] env[61855]: DEBUG nova.compute.manager [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Refreshing instance network info cache due to event network-changed-7e8b7a2c-3d18-4b78-8224-3185fa182064. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 870.022095] env[61855]: DEBUG oslo_concurrency.lockutils [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] Acquiring lock "refresh_cache-29ae12b6-adc9-4a25-8a89-9a88470b3818" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.022234] env[61855]: DEBUG oslo_concurrency.lockutils [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] Acquired lock "refresh_cache-29ae12b6-adc9-4a25-8a89-9a88470b3818" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.022398] env[61855]: DEBUG nova.network.neutron [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Refreshing network info cache for port 7e8b7a2c-3d18-4b78-8224-3185fa182064 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 870.404191] env[61855]: DEBUG nova.network.neutron [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Updated VIF entry in instance network info cache for port 7e8b7a2c-3d18-4b78-8224-3185fa182064. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 870.406069] env[61855]: DEBUG nova.network.neutron [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Updating instance_info_cache with network_info: [{"id": "7e8b7a2c-3d18-4b78-8224-3185fa182064", "address": "fa:16:3e:53:cf:d8", "network": {"id": "5928ac77-b700-404c-9e6f-7899b23ce28a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1436402704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55e6da686f974fcb97990523a31bf149", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e8b7a2c-3d", "ovs_interfaceid": "7e8b7a2c-3d18-4b78-8224-3185fa182064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.424433] env[61855]: DEBUG oslo_concurrency.lockutils [req-40dc1b1a-64d4-4b33-9334-d222c49319ea req-044fdb5e-8ac6-4b45-b07f-5f4d1e43e8cb service nova] Releasing lock "refresh_cache-29ae12b6-adc9-4a25-8a89-9a88470b3818" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.682662] env[61855]: DEBUG oslo_concurrency.lockutils [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.528173] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquiring lock "adfd94b5-7e03-49d1-a445-c58b296e5185" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.528549] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.919013] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 878.923747] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.924910] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.936606] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.936840] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.937019] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.937186] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 879.938323] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625889d0-30ee-44e3-9e09-56f894a808e3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.947159] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd1363f-41e9-4a20-af6d-e00b6ea5e368 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.960789] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcc17f1-045c-4f48-b9e7-991af6cd5556 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.967213] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5903d691-d270-4f05-919b-53ca711ec588 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.997768] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180611MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 879.997768] env[61855]: DEBUG oslo_concurrency.lockutils 
[None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.997768] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.074682] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075090] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e21722cc-672b-4f8a-9f78-e50ac83071a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075090] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 311d492c-0093-4d64-a56f-80fce95b809a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075090] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6c15201d-7373-4040-9256-84ff11fcfed2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075291] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075334] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075435] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075550] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075665] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.075776] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 880.087736] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 850493b3-7c3d-4b01-a807-bc4cacb0cb5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.098808] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.110406] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.122739] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.133075] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b693f06d-14c8-49f7-8870-8b440908de74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.143044] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f3215717-1220-47a1-be3d-d1d5efcac656 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.152893] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 89517dc6-96e8-4e89-aa1c-cdd43e340551 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.162807] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 13e65b84-f5e2-4352-94ec-b37d0803e279 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.171967] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 21d8fcb1-84bc-4d93-8a17-24230e0ee8cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.181804] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance faaec6a7-01f9-4f9b-992c-8c86a007b6aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.191162] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8767138c-2cc2-49ae-9f35-c433dfedcb45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.200358] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.209019] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance dbddb131-724e-44f0-ad59-9c7c1f6e4889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.218437] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2c085a5c-d229-42e2-9155-ad5647110e07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.228025] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.239653] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 95bdfda9-d381-4a0f-bfde-57b423ff19c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.248256] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ddb1501a-9afc-4916-ab4e-97b851b0f931 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.257704] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a0d8f45a-5b83-425a-b8ac-1d507a441bba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.269039] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 880.269283] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 880.269428] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 880.598363] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f76b5d-4c61-4fc9-8c50-dd7d8445f330 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.605907] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5c0dd1-b450-483c-ba03-398c0d3f49a0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.636486] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5494c9ec-35be-4333-960e-adf067b65ff1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.643631] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0de8bd2-e686-4344-8ef6-3a42c94a6561 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.656564] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.664915] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.681872] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 880.681872] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.684s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.682065] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.682065] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.682065] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.682567] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 881.919433] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.944281] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.944552] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 881.944700] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 881.965633] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.965796] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.965934] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966076] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966218] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966418] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966480] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966621] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966691] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966807] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 881.966928] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 881.967379] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.967557] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.662571] env[61855]: WARNING oslo_vmware.rw_handles [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 912.662571] env[61855]: ERROR oslo_vmware.rw_handles [ 912.663435] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 912.664905] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 912.665522] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Copying Virtual Disk [datastore2] vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/3577c5ab-358c-43a4-843b-b391e0eb3adc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk 
{{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 912.665700] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09ad3421-c439-4c57-b55c-993b73958f4e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 912.674205] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){
[ 912.674205] env[61855]: value = "task-4302852"
[ 912.674205] env[61855]: _type = "Task"
[ 912.674205] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 912.685816] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': task-4302852, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 913.185510] env[61855]: DEBUG oslo_vmware.exceptions [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 913.185813] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 913.186429] env[61855]: ERROR nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 913.186429] env[61855]: Faults: ['InvalidArgument']
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Traceback (most recent call last):
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] yield resources
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self.driver.spawn(context, instance, image_meta,
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self._fetch_image_if_missing(context, vi)
[ 913.186429] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] image_cache(vi, tmp_image_ds_loc)
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] vm_util.copy_virtual_disk(
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] session._wait_for_task(vmdk_copy_task)
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] return self.wait_for_task(task_ref)
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] return evt.wait()
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] result = hub.switch()
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 913.186870] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] return self.greenlet.switch()
[ 913.187274] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 913.187274] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self.f(*self.args, **self.kw)
[ 913.187274] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 913.187274] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] raise exceptions.translate_fault(task_info.error)
[ 913.187274] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 913.187274] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Faults: ['InvalidArgument']
[ 913.187274] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5]
[ 913.187274] env[61855]: INFO nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Terminating instance
[ 913.188927] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 913.189587] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 913.190298] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 913.190518] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 913.190769] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d97c430b-0b83-440b-87e5-12052f1fe281 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 913.193233] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d87370-667f-4e33-8110-dac904fba26c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 913.201209] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 913.201449] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd74504f-0b1b-4366-8755-b7f33837382a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 913.203704] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 913.203874] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424
tempest-ServersAdminTestJSON-1453767424-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 913.204879] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76a5c22b-54b9-41e6-9155-1fab3ce2f6d1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.210493] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){ [ 913.210493] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a36bdd-f1b1-2f98-e360-35b0c9f3942d" [ 913.210493] env[61855]: _type = "Task" [ 913.210493] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.219368] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a36bdd-f1b1-2f98-e360-35b0c9f3942d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.271969] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 913.272260] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 913.272461] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Deleting the datastore file [datastore2] e21722cc-672b-4f8a-9f78-e50ac83071a5 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.272735] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ea1e951-b4ef-41f8-88ff-c53fc9e837df {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.278670] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){ [ 913.278670] env[61855]: value = "task-4302854" [ 913.278670] env[61855]: _type = "Task" [ 913.278670] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.286556] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': task-4302854, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.722267] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 913.722927] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating directory with path [datastore2] vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 913.722927] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-072218c6-a934-43bf-bc56-c6ccd8716f52 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.734982] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Created directory with path [datastore2] vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 913.735171] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Fetch image to [datastore2] vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 913.735359] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 913.736939] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2243d81-3d3c-4e9b-b55e-f03fb043aea0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.743545] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b74790-4905-4920-b651-b1821b0634a8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.753015] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf7a853-d6e9-4d1a-8d41-2eeca48e51f7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.786289] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ffa20e-e41a-4b9c-a480-e0e98f2c1c20 {{(pid=61855) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.793598] env[61855]: DEBUG oslo_vmware.api [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': task-4302854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069059} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.795180] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.795365] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 913.795542] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 913.795899] env[61855]: INFO nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Took 0.61 seconds to destroy the instance on the hypervisor. 
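
The spawn failure above follows oslo.vmware's invoke-then-poll pattern: the driver invokes CopyVirtualDisk_Task against the vCenter VirtualDiskManager, then wait_for_task() polls the task object (the "progress is 0%" records) until it completes or its fault is translated and raised, which is where the VimFaultException surfaces. A minimal sketch of that pattern, assuming reachable vCenter credentials; the host and datastore paths below are illustrative placeholders, not values taken from this log:

```python
# Sketch of the invoke-then-poll pattern behind the traceback above.
# Host, credentials and datastore paths are placeholders.
from oslo_vmware import api
from oslo_vmware import exceptions as vexc

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# CopyVirtualDisk_Task is a VirtualDiskManager method; invoke_api()
# returns a task reference immediately, without waiting.
vdm = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
    destName='[datastore2] vmware_temp/example/example.vmdk')
try:
    # wait_for_task() polls the task server-side; on failure the task's
    # fault is translated and raised, as in the traceback above.
    session.wait_for_task(task)
except vexc.VimFaultException as e:
    print('copy failed: %s (faults: %s)' % (e, e.fault_list))
```
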
[ 913.797812] env[61855]: DEBUG nova.compute.claims [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 913.797976] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.798225] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.802087] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7b2d0f8c-c2f7-4bb2-98e9-087f0a6d16f9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.831133] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 913.902450] env[61855]: DEBUG oslo_vmware.rw_handles [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 913.971677] env[61855]: DEBUG oslo_vmware.rw_handles [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 913.975026] env[61855]: DEBUG oslo_vmware.rw_handles [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 914.354227] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f969c67-5d6c-4d12-9e57-e864c4580901 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.362801] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89176a9-13be-462e-8c23-da9b972b6b75 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.395521] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00c557e-0ee1-4bab-a2ea-24ea15181dd5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.402881] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb534f9-4145-49a5-8425-4e5f000e0442 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 914.415869] env[61855]: DEBUG nova.compute.provider_tree [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 914.426392] env[61855]: DEBUG nova.scheduler.client.report [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 914.443966] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.646s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 914.444573] env[61855]: ERROR nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 914.444573] env[61855]: Faults: ['InvalidArgument']
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Traceback (most recent call last):
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self.driver.spawn(context, instance, image_meta,
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self._fetch_image_if_missing(context, vi)
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] image_cache(vi, tmp_image_ds_loc)
[ 914.444573] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] vm_util.copy_virtual_disk(
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] session._wait_for_task(vmdk_copy_task)
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] return self.wait_for_task(task_ref)
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] return evt.wait()
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] result = hub.switch()
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] return self.greenlet.switch()
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 914.444964] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] self.f(*self.args, **self.kw)
[ 914.445393] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 914.445393] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] raise exceptions.translate_fault(task_info.error)
[ 914.445393] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 914.445393] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Faults: ['InvalidArgument']
[ 914.445393] env[61855]: ERROR nova.compute.manager [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5]
[ 914.445393] env[61855]: DEBUG nova.compute.utils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 914.448989] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Build of instance e21722cc-672b-4f8a-9f78-e50ac83071a5 was re-scheduled: A specified parameter was not correct: fileType
[ 914.448989] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 914.448989] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 914.448989] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 914.448989] env[61855]: DEBUG nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 914.449260] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 915.156590] env[61855]: DEBUG nova.network.neutron [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.171416] env[61855]: INFO nova.compute.manager [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Took 0.72 seconds to deallocate network for instance. [ 915.298312] env[61855]: INFO nova.scheduler.client.report [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Deleted allocations for instance e21722cc-672b-4f8a-9f78-e50ac83071a5 [ 915.336391] env[61855]: DEBUG oslo_concurrency.lockutils [None req-22afdeaa-5124-4f8e-adbc-f93d0af0d66b tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 337.124s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.337743] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 136.744s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.337965] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "e21722cc-672b-4f8a-9f78-e50ac83071a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.338543] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.338543] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.346538] env[61855]: INFO nova.compute.manager [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Terminating instance [ 915.348949] env[61855]: DEBUG nova.compute.manager [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 915.349426] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 915.349481] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc6840d9-037d-4ed4-807d-b5213564d135 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.352132] env[61855]: DEBUG nova.compute.manager [None req-12cd4ca2-e3e6-494b-9daf-05d6757feeca tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 850493b3-7c3d-4b01-a807-bc4cacb0cb5f] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 915.362440] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1962ab5-d44a-44e9-90db-0e9a68afbee0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.377896] env[61855]: DEBUG nova.compute.manager [None req-12cd4ca2-e3e6-494b-9daf-05d6757feeca tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] [instance: 850493b3-7c3d-4b01-a807-bc4cacb0cb5f] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 915.393194] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e21722cc-672b-4f8a-9f78-e50ac83071a5 could not be found. 
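
The InstanceNotFound warning above is the benign side of a race: the first request already unregistered the VM and deleted its datastore files, so when do_terminate_instance runs for the same instance the backend lookup misses, and the driver proceeds as if the destroy succeeded so network deallocation can continue. A rough sketch of that tolerance, with find_vm_ref() and delete_vm() as hypothetical stand-ins for the vmwareapi vmops internals (not real Nova helpers), assuming a Nova tree on the import path:

```python
# Sketch: destroy treats an already-missing VM as success.
from nova import exception

def find_vm_ref(uuid):
    # Hypothetical lookup; in this race the VM is already gone.
    raise exception.InstanceNotFound(instance_id=uuid)

def delete_vm(vm_ref):
    pass  # hypothetical unregister-and-delete teardown

def destroy(instance_uuid):
    try:
        delete_vm(find_vm_ref(instance_uuid))
    except exception.InstanceNotFound:
        # Nothing left on the hypervisor; fall through so the compute
        # manager can still deallocate networks and allocations.
        print('Instance %s does not exist on backend' % instance_uuid)

destroy('e21722cc-672b-4f8a-9f78-e50ac83071a5')
```
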
[ 915.393455] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 915.393642] env[61855]: INFO nova.compute.manager [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 915.393900] env[61855]: DEBUG oslo.service.loopingcall [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 915.394227] env[61855]: DEBUG nova.compute.manager [-] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 915.394289] env[61855]: DEBUG nova.network.neutron [-] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 915.411270] env[61855]: DEBUG oslo_concurrency.lockutils [None req-12cd4ca2-e3e6-494b-9daf-05d6757feeca tempest-DeleteServersAdminTestJSON-1320392767 tempest-DeleteServersAdminTestJSON-1320392767-project-member] Lock "850493b3-7c3d-4b01-a807-bc4cacb0cb5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.553s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.421028] env[61855]: DEBUG nova.network.neutron [-] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.427167] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 915.431536] env[61855]: INFO nova.compute.manager [-] [instance: e21722cc-672b-4f8a-9f78-e50ac83071a5] Took 0.04 seconds to deallocate network for instance. 
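
The instance_claim that follows checks the request against the capacity Placement derives from the inventory reported throughout this log: for each resource class, capacity = (total - reserved) * allocation_ratio. A quick worked example using the figures logged for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad (the formula is standard Placement semantics; only the numbers come from this log):

```python
# Capacity arithmetic behind the "Claim successful" record below.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0       -- 48 vCPUs oversubscribed 4x; the final resource view
#                     earlier shows only 10 of them allocated
# MEMORY_MB 196078.0
# DISK_GB 210.0
```
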
[ 915.488227] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.488227] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.489906] env[61855]: INFO nova.compute.claims [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.542521] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5ef2622c-2c30-43bc-a9f6-eec790a05bf6 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "e21722cc-672b-4f8a-9f78-e50ac83071a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.938115] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36eecd04-d9d7-474c-8bdd-97ee623d64a4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.950795] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8cb26b-fb9a-4a3e-b2c3-3b2c87447a30 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.986709] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53231c6-8a4b-42a2-9cf4-d9352f3f76dd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.996385] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8575acf9-c9b2-4e95-a2a7-c47960ed2492 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.012174] env[61855]: DEBUG nova.compute.provider_tree [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.021966] env[61855]: DEBUG nova.scheduler.client.report [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 916.040060] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.552s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.040229] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 916.105845] env[61855]: DEBUG nova.compute.utils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 916.109736] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 916.109736] env[61855]: DEBUG nova.network.neutron [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 916.120547] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
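The inventory dict logged at 916.021966 above determines what placement lets the scheduler pack onto this node: usable capacity is derived as (total - reserved) * allocation_ratio, and max_unit caps any single allocation. A quick arithmetic check of the reported figures; the capacity formula reflects standard placement semantics, and the dict values are copied from the log:

```python
# Recompute schedulable capacity from the inventory logged above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
             'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB': {'total': 210, 'reserved': 0, 'max_unit': 107,
                'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, per-allocation max={inv['max_unit']}")

# VCPU: capacity=192, per-allocation max=16
# MEMORY_MB: capacity=196078, per-allocation max=65530
# DISK_GB: capacity=210, per-allocation max=107
```

So with the 4.0 VCPU overcommit this node can back 192 allocated vCPUs, but no single instance may claim more than 16 vCPUs, 65530 MB of RAM, or 107 GB of disk.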
[ 916.235119] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 916.245548] env[61855]: DEBUG nova.policy [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa4687cadf74da8879e45180bc53075', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '393b6bf5812d452485a233ff672fbf01', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 916.274365] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:31:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='8ec81ba5-9ad7-4f7f-a0b9-8b91db81a9a6',id=36,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-733307803',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 916.274625] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 916.274784] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.274967] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 916.275296] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.275528] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 916.275797] env[61855]:
DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 916.276039] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 916.276298] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 916.276556] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 916.276794] env[61855]: DEBUG nova.virt.hardware [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 916.278048] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61781746-013d-46bc-b929-dd03fce96831 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.286967] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e21789-7931-4fa4-a48c-67e61354f1ad {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.445282] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.746780] env[61855]: DEBUG nova.network.neutron [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Successfully created port: bef39f74-a00b-4a06-bfac-55ec6dee80cc {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.677244] env[61855]: DEBUG nova.network.neutron [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Successfully updated port: bef39f74-a00b-4a06-bfac-55ec6dee80cc {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 917.688670] env[61855]: DEBUG oslo_concurrency.lockutils [None
req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "refresh_cache-9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.688849] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired lock "refresh_cache-9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.688971] env[61855]: DEBUG nova.network.neutron [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 917.765167] env[61855]: DEBUG nova.network.neutron [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 917.861726] env[61855]: DEBUG nova.compute.manager [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Received event network-vif-plugged-bef39f74-a00b-4a06-bfac-55ec6dee80cc {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 917.861952] env[61855]: DEBUG oslo_concurrency.lockutils [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] Acquiring lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.862170] env[61855]: DEBUG oslo_concurrency.lockutils [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.862668] env[61855]: DEBUG oslo_concurrency.lockutils [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.862668] env[61855]: DEBUG nova.compute.manager [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] No waiting events found dispatching network-vif-plugged-bef39f74-a00b-4a06-bfac-55ec6dee80cc {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 917.862807] env[61855]: WARNING nova.compute.manager [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance:
9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Received unexpected event network-vif-plugged-bef39f74-a00b-4a06-bfac-55ec6dee80cc for instance with vm_state building and task_state spawning. [ 917.862898] env[61855]: DEBUG nova.compute.manager [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Received event network-changed-bef39f74-a00b-4a06-bfac-55ec6dee80cc {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 917.863060] env[61855]: DEBUG nova.compute.manager [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Refreshing instance network info cache due to event network-changed-bef39f74-a00b-4a06-bfac-55ec6dee80cc. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 917.863443] env[61855]: DEBUG oslo_concurrency.lockutils [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] Acquiring lock "refresh_cache-9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.997222] env[61855]: DEBUG nova.network.neutron [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Updating instance_info_cache with network_info: [{"id": "bef39f74-a00b-4a06-bfac-55ec6dee80cc", "address": "fa:16:3e:86:d9:ff", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef39f74-a0", "ovs_interfaceid": "bef39f74-a00b-4a06-bfac-55ec6dee80cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.011884] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Releasing lock "refresh_cache-9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.014347] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Instance network_info: |[{"id": "bef39f74-a00b-4a06-bfac-55ec6dee80cc", "address": "fa:16:3e:86:d9:ff", "network": {"id": 
"82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef39f74-a0", "ovs_interfaceid": "bef39f74-a00b-4a06-bfac-55ec6dee80cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 918.014347] env[61855]: DEBUG oslo_concurrency.lockutils [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] Acquired lock "refresh_cache-9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.014618] env[61855]: DEBUG nova.network.neutron [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Refreshing network info cache for port bef39f74-a00b-4a06-bfac-55ec6dee80cc {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 918.014618] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:d9:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bef39f74-a00b-4a06-bfac-55ec6dee80cc', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.027998] env[61855]: DEBUG oslo.service.loopingcall [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.028971] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 918.031617] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1a05a11-09d0-4cb3-9d40-c0ee6794002e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.056839] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.056839] env[61855]: value = "task-4302855" [ 918.056839] env[61855]: _type = "Task" [ 918.056839] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.064712] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302855, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.568032] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302855, 'name': CreateVM_Task, 'duration_secs': 0.30836} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.568281] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 918.569031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.569263] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.569645] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.569934] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2115cf4-1956-4300-bebc-6bd602f139f4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.574817] env[61855]: DEBUG oslo_vmware.api [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for the task: (returnval){ [ 918.574817] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5284f62a-2d3f-d166-c745-ff15293e3b7b" [ 918.574817] env[61855]: _type = "Task" [ 918.574817] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.582477] env[61855]: DEBUG oslo_vmware.api [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5284f62a-2d3f-d166-c745-ff15293e3b7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.669018] env[61855]: DEBUG nova.network.neutron [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Updated VIF entry in instance network info cache for port bef39f74-a00b-4a06-bfac-55ec6dee80cc. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 918.669403] env[61855]: DEBUG nova.network.neutron [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Updating instance_info_cache with network_info: [{"id": "bef39f74-a00b-4a06-bfac-55ec6dee80cc", "address": "fa:16:3e:86:d9:ff", "network": {"id": "82a223e6-bf9a-49d7-97f3-dc11fef19370", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9af591690d3f45af8302cad2a4578940", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef39f74-a0", "ovs_interfaceid": "bef39f74-a00b-4a06-bfac-55ec6dee80cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.684230] env[61855]: DEBUG oslo_concurrency.lockutils [req-429f3a48-8fdc-402c-ade8-b4341d882661 req-ebae294b-0906-4c6c-957c-82da99c53830 service nova] Releasing lock "refresh_cache-9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.085394] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.085651] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.085864] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.176377] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.176645] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.016096] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f2e6ce4e-6ff0-42fe-8453-b8ac7cdb29c6 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.016096] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f2e6ce4e-6ff0-42fe-8453-b8ac7cdb29c6 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.968968] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 939.924798] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 939.942661] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.942913] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.943109] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.943270] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 939.944406] env[61855]: DEBUG
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea4e3c4-aefb-467c-be03-c9910f0012ab {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.958534] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d302e5bc-5ebc-4c03-9302-eb059e2cbc2b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.973082] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a658b0-574d-4b8f-9a81-66267492c547 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.979507] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2388d2dc-3c37-4f48-9edc-7b28b180d7bc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.008487] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180647MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 940.008647] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.008856] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.107056] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.107238] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 311d492c-0093-4d64-a56f-80fce95b809a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.107367] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6c15201d-7373-4040-9256-84ff11fcfed2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.107491] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.107614] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.107734] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.107935] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.107964] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.108101] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.108423] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 940.121531] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.132646] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.144193] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b693f06d-14c8-49f7-8870-8b440908de74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.153824] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f3215717-1220-47a1-be3d-d1d5efcac656 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.163899] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 89517dc6-96e8-4e89-aa1c-cdd43e340551 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.174029] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 13e65b84-f5e2-4352-94ec-b37d0803e279 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.184991] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 21d8fcb1-84bc-4d93-8a17-24230e0ee8cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.194441] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance faaec6a7-01f9-4f9b-992c-8c86a007b6aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.205033] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8767138c-2cc2-49ae-9f35-c433dfedcb45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.214958] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.225825] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance dbddb131-724e-44f0-ad59-9c7c1f6e4889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.236220] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2c085a5c-d229-42e2-9155-ad5647110e07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.245959] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.256289] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 95bdfda9-d381-4a0f-bfde-57b423ff19c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.266300] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ddb1501a-9afc-4916-ab4e-97b851b0f931 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.276132] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a0d8f45a-5b83-425a-b8ac-1d507a441bba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.287331] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.296211] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.305601] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 940.305834] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 940.305979] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 940.636412] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebc4dd9-0fc8-45fd-8f84-8e93e016d118 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.644298] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d433d1e-e739-4cc5-88cf-a5368b4d97c2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.673449] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7b1a57-3b4d-4e77-9fd3-b76a4464db82 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.680226] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8206e1d-0692-4556-a791-497ba03b73b4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.692751] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.701008] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 940.718650] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 940.718824] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.710s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.718534] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 941.718845] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 941.924384] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.923964] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.924330] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 942.924330] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 942.944824] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.944960] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945111] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945239] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945365] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945484] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945603] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945721] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945839] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.945952] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 942.946082] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 942.946562] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.946739] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.946875] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 943.924286] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.716148] env[61855]: WARNING oslo_vmware.rw_handles [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 959.716148] env[61855]: ERROR oslo_vmware.rw_handles [ 959.716652] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 959.718773] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 959.719076] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Copying Virtual Disk [datastore2] vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/c05822b2-cbad-43a3-8363-d2034f0b3caa/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 959.719422] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc649c5d-bdc2-4f8d-ac4a-b0ed50230de0 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.727984] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){ [ 959.727984] env[61855]: value = "task-4302856" [ 959.727984] env[61855]: _type = "Task" [ 959.727984] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.736122] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': task-4302856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.239027] env[61855]: DEBUG oslo_vmware.exceptions [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 960.239027] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.239677] env[61855]: ERROR nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 960.239677] env[61855]: Faults: ['InvalidArgument'] [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Traceback (most recent call last): [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] yield resources [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self.driver.spawn(context, instance, image_meta, [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 960.239677] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self._fetch_image_if_missing(context, vi) [ 960.239677] 
env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] image_cache(vi, tmp_image_ds_loc) [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] vm_util.copy_virtual_disk( [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] session._wait_for_task(vmdk_copy_task) [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] return self.wait_for_task(task_ref) [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] return evt.wait() [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] result = hub.switch() [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 960.240176] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] return self.greenlet.switch() [ 960.240567] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 960.240567] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self.f(*self.args, **self.kw) [ 960.240567] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 960.240567] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] raise exceptions.translate_fault(task_info.error) [ 960.240567] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 960.240567] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Faults: ['InvalidArgument'] [ 960.240567] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] [ 960.240567] env[61855]: INFO nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 
tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Terminating instance [ 960.241427] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.241594] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.241836] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-966d04ad-c056-46dd-b405-c5ae2ddb445a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.244041] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 960.244242] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 960.244979] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a5bc18-289d-4779-8518-bc013b7ce17c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.251866] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 960.253050] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a2e63da-77ef-43f2-9fe4-e4ecbeaf0caf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.254803] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.254803] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 960.255061] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0b63539-45b8-46dc-b01c-cef9122cef60 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.259689] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Waiting for the task: (returnval){ [ 960.259689] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52913270-7440-71cc-204f-79876d33d157" [ 960.259689] env[61855]: _type = "Task" [ 960.259689] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.266751] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52913270-7440-71cc-204f-79876d33d157, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.330052] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 960.330052] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 960.330052] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Deleting the datastore file [datastore2] 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.330052] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78d8304a-f585-46e1-9523-0111772b8483 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.336312] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for the task: (returnval){ [ 960.336312] env[61855]: value = "task-4302858" [ 960.336312] env[61855]: _type = "Task" [ 960.336312] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.343825] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': task-4302858, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.770150] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 960.770539] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Creating directory with path [datastore2] vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.770789] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18e2d115-3c38-4a10-a2fd-2f808bc23e0b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.783324] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Created directory with path [datastore2] vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.783454] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Fetch image to [datastore2] vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 960.783603] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 960.784388] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae07e1d4-0fbf-4a93-9d98-23c8d5cd8dc8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.791128] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3251cfb0-dc3f-49cf-9431-d7673cd16635 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.800168] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffcc24f-0add-433e-bc71-c6c21541ea23 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.832221] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d8c428-7d91-4f0c-8230-05d6e4642b2f {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.841215] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b8dde536-1f0c-4be6-a26c-c4e0cb7b6e81 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.847461] env[61855]: DEBUG oslo_vmware.api [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Task: {'id': task-4302858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068879} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.847751] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.847954] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 960.848159] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 960.848335] env[61855]: INFO nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 960.850708] env[61855]: DEBUG nova.compute.claims [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 960.850884] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.851124] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.864227] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 960.939753] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 960.999425] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 960.999425] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 961.303248] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e50598-8c66-4054-b687-e0f32627111e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.310915] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0b991e-8605-4bbc-9e62-124427c2ea2e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.340145] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43a8385-0d03-4c26-806b-a096a365e6d1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.347579] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e72b8b5-d4fe-4dc8-8580-40054e09fd0a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.360800] env[61855]: DEBUG nova.compute.provider_tree [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.369098] env[61855]: DEBUG nova.scheduler.client.report [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 961.384319] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.533s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.384834] env[61855]: ERROR nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 961.384834] env[61855]: Faults: ['InvalidArgument'] [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Traceback (most recent call last): [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 961.384834] env[61855]: ERROR nova.compute.manager 
[instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self.driver.spawn(context, instance, image_meta, [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self._fetch_image_if_missing(context, vi) [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] image_cache(vi, tmp_image_ds_loc) [ 961.384834] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] vm_util.copy_virtual_disk( [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] session._wait_for_task(vmdk_copy_task) [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] return self.wait_for_task(task_ref) [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] return evt.wait() [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] result = hub.switch() [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] return self.greenlet.switch() [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 961.385297] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] self.f(*self.args, **self.kw) [ 961.385794] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 961.385794] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] raise exceptions.translate_fault(task_info.error) [ 961.385794] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 961.385794] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Faults: ['InvalidArgument'] [ 961.385794] env[61855]: ERROR nova.compute.manager [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] [ 961.385794] env[61855]: DEBUG nova.compute.utils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 961.386915] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Build of instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 was re-scheduled: A specified parameter was not correct: fileType [ 961.386915] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 961.387301] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 961.387469] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 961.387641] env[61855]: DEBUG nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 961.387800] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 961.713471] env[61855]: DEBUG nova.network.neutron [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.725983] env[61855]: INFO nova.compute.manager [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Took 0.34 seconds to deallocate network for instance. [ 961.838153] env[61855]: INFO nova.scheduler.client.report [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Deleted allocations for instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 [ 961.861281] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de0df888-b94f-4cab-b0ae-3d0162561367 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 386.524s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.862472] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 45.417s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.862702] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Acquiring lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.862917] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.863128] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.865349] env[61855]: INFO nova.compute.manager [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Terminating instance [ 961.867513] env[61855]: DEBUG nova.compute.manager [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 961.867513] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 961.868292] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fb3498c-3af3-4508-aa80-2d010512fad4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.878949] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ae4fef-a536-4f53-bdd6-f70ba48ab306 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.889695] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 961.909711] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31 could not be found. [ 961.910726] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 961.910726] env[61855]: INFO nova.compute.manager [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 961.910726] env[61855]: DEBUG oslo.service.loopingcall [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.910726] env[61855]: DEBUG nova.compute.manager [-] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 961.910726] env[61855]: DEBUG nova.network.neutron [-] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 961.951482] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.951799] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.953351] env[61855]: INFO nova.compute.claims [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 961.956953] env[61855]: DEBUG nova.network.neutron [-] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.967026] env[61855]: INFO nova.compute.manager [-] [instance: 4bdeda94-3126-4bce-8c4f-ea1a0aac6a31] Took 0.06 seconds to deallocate network for instance.
[ 962.073186] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3d9d9f73-4f61-4a90-a9b3-464cf30ddfa2 tempest-ServersAdminTestJSON-1453767424 tempest-ServersAdminTestJSON-1453767424-project-member] Lock "4bdeda94-3126-4bce-8c4f-ea1a0aac6a31" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.211s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.371018] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2111edb-da72-40ce-a473-9bdbfb6d93fd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.378893] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04332afb-3cd6-475b-914c-60c81629203c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.410774] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29aa956-4893-4788-b044-1c172c9cf87f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.415365] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d997b115-9fe0-4bed-a780-f3d20c90402e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.428051] env[61855]: DEBUG nova.compute.provider_tree [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.436664] env[61855]: DEBUG nova.scheduler.client.report [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.453869] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.502s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.456336] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Start building networks asynchronously for instance.
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 962.490074] env[61855]: DEBUG nova.compute.utils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 962.491513] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 962.491678] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 962.501748] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 962.574927] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 962.594989] env[61855]: DEBUG nova.policy [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7279eddfb3ca430084aa3057c1766a00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b9e10b82d104adcbe677576fcaaf991', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 962.608026] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 962.608026] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 962.608026] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.608192] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 962.608192] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.608192] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 962.608192] env[61855]: DEBUG
nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 962.608192] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 962.608384] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 962.608384] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 962.608384] env[61855]: DEBUG nova.virt.hardware [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.608593] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1330cd7d-4eee-4949-8a70-77009bdacd04 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.619569] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ea2e92-4296-4302-8979-8521d9b5b964 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.205174] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Successfully created port: 10f6bb30-298c-488f-8302-12d3f2f7ee7d {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.212633] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.439890] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Successfully updated port: 10f6bb30-298c-488f-8302-12d3f2f7ee7d {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 964.445098] env[61855]: DEBUG
nova.compute.manager [req-7f1337f0-9b15-43ea-aa64-916398b7a847 req-7a678408-7d40-4888-a61a-8614ec512929 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Received event network-vif-plugged-10f6bb30-298c-488f-8302-12d3f2f7ee7d {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 964.445098] env[61855]: DEBUG oslo_concurrency.lockutils [req-7f1337f0-9b15-43ea-aa64-916398b7a847 req-7a678408-7d40-4888-a61a-8614ec512929 service nova] Acquiring lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.445098] env[61855]: DEBUG oslo_concurrency.lockutils [req-7f1337f0-9b15-43ea-aa64-916398b7a847 req-7a678408-7d40-4888-a61a-8614ec512929 service nova] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.445098] env[61855]: DEBUG oslo_concurrency.lockutils [req-7f1337f0-9b15-43ea-aa64-916398b7a847 req-7a678408-7d40-4888-a61a-8614ec512929 service nova] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.445845] env[61855]: DEBUG nova.compute.manager [req-7f1337f0-9b15-43ea-aa64-916398b7a847 req-7a678408-7d40-4888-a61a-8614ec512929 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] No waiting events found dispatching network-vif-plugged-10f6bb30-298c-488f-8302-12d3f2f7ee7d {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 964.445845] env[61855]: WARNING nova.compute.manager [req-7f1337f0-9b15-43ea-aa64-916398b7a847 req-7a678408-7d40-4888-a61a-8614ec512929 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Received unexpected event network-vif-plugged-10f6bb30-298c-488f-8302-12d3f2f7ee7d for instance with vm_state building and task_state spawning.
[ 964.452788] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "refresh_cache-bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.452788] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquired lock "refresh_cache-bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.452788] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 964.542838] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 964.976871] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Updating instance_info_cache with network_info: [{"id": "10f6bb30-298c-488f-8302-12d3f2f7ee7d", "address": "fa:16:3e:c7:58:0a", "network": {"id": "060b50df-d8dc-42c1-8594-dcc2f597bc6f", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1565291000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b9e10b82d104adcbe677576fcaaf991", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f6bb30-29", "ovs_interfaceid": "10f6bb30-298c-488f-8302-12d3f2f7ee7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.990923] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Releasing lock "refresh_cache-bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.991255] env[61855]: DEBUG nova.compute.manager [None 
req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Instance network_info: |[{"id": "10f6bb30-298c-488f-8302-12d3f2f7ee7d", "address": "fa:16:3e:c7:58:0a", "network": {"id": "060b50df-d8dc-42c1-8594-dcc2f597bc6f", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1565291000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b9e10b82d104adcbe677576fcaaf991", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f6bb30-29", "ovs_interfaceid": "10f6bb30-298c-488f-8302-12d3f2f7ee7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 964.991657] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:58:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10f6bb30-298c-488f-8302-12d3f2f7ee7d', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.999871] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Creating folder: Project (4b9e10b82d104adcbe677576fcaaf991). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.000884] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6afbff04-3dda-414b-bbfc-a1cc4015967c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.010209] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Created folder: Project (4b9e10b82d104adcbe677576fcaaf991) in parent group-v847048. [ 965.010402] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Creating folder: Instances. Parent ref: group-v847104. 
{{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.010633] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc4592e5-bbd2-4da1-8ace-23ae5aa8bc99 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.018359] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Created folder: Instances in parent group-v847104. [ 965.019112] env[61855]: DEBUG oslo.service.loopingcall [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.019112] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 965.019112] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d346240-8c83-4cde-bd9c-6136f77f07fb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.042822] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.042822] env[61855]: value = "task-4302861" [ 965.042822] env[61855]: _type = "Task" [ 965.042822] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.051405] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302861, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.552588] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302861, 'name': CreateVM_Task, 'duration_secs': 0.297676} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.552588] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 965.554741] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.554741] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.554741] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.554741] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3cc4931-0099-4b8a-a382-149c678a4a77 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.558396] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Waiting for the task: (returnval){ [ 965.558396] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52cffba5-8e2b-b308-ffbf-a72348e371c3" [ 965.558396] env[61855]: _type = "Task" [ 965.558396] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.566093] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52cffba5-8e2b-b308-ffbf-a72348e371c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.071779] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.072056] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.072282] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.535708] env[61855]: DEBUG nova.compute.manager [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Received event network-changed-10f6bb30-298c-488f-8302-12d3f2f7ee7d {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 966.536268] env[61855]: DEBUG nova.compute.manager [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Refreshing instance network info cache due to event network-changed-10f6bb30-298c-488f-8302-12d3f2f7ee7d. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 966.536268] env[61855]: DEBUG oslo_concurrency.lockutils [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] Acquiring lock "refresh_cache-bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.536268] env[61855]: DEBUG oslo_concurrency.lockutils [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] Acquired lock "refresh_cache-bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.536429] env[61855]: DEBUG nova.network.neutron [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Refreshing network info cache for port 10f6bb30-298c-488f-8302-12d3f2f7ee7d {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 967.375724] env[61855]: DEBUG nova.network.neutron [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Updated VIF entry in instance network info cache for port 10f6bb30-298c-488f-8302-12d3f2f7ee7d. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 967.376091] env[61855]: DEBUG nova.network.neutron [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Updating instance_info_cache with network_info: [{"id": "10f6bb30-298c-488f-8302-12d3f2f7ee7d", "address": "fa:16:3e:c7:58:0a", "network": {"id": "060b50df-d8dc-42c1-8594-dcc2f597bc6f", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1565291000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b9e10b82d104adcbe677576fcaaf991", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f6bb30-29", "ovs_interfaceid": "10f6bb30-298c-488f-8302-12d3f2f7ee7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.391558] env[61855]: DEBUG oslo_concurrency.lockutils [req-c65fc259-c82d-4c2b-a0fc-caf9ed7ca1b4 req-e8ccd609-08d8-4972-81a3-b6be04bb6f80 service nova] Releasing lock "refresh_cache-bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.781029] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "aa9a75c4-371f-407e-a79e-133606a9fabc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.781474] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.831566] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2a5b2582-45e6-49bc-9de2-18ddc691eb1f tempest-ServerActionsTestJSON-726919981 tempest-ServerActionsTestJSON-726919981-project-member] Acquiring lock "02f2ee69-9ecf-4176-943e-06cdf255c92d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.832755] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2a5b2582-45e6-49bc-9de2-18ddc691eb1f tempest-ServerActionsTestJSON-726919981
tempest-ServerActionsTestJSON-726919981-project-member] Lock "02f2ee69-9ecf-4176-943e-06cdf255c92d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.606756] env[61855]: DEBUG oslo_concurrency.lockutils [None req-09f01b8d-769d-40db-a4b3-1f09f5907ea8 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "54a8ce25-2900-44a4-9985-b70514fcc9d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.607018] env[61855]: DEBUG oslo_concurrency.lockutils [None req-09f01b8d-769d-40db-a4b3-1f09f5907ea8 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "54a8ce25-2900-44a4-9985-b70514fcc9d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.352925] env[61855]: DEBUG oslo_concurrency.lockutils [None req-249c6bb1-eb3f-4fea-a632-1bd9ac3d0466 tempest-InstanceActionsNegativeTestJSON-237435190 tempest-InstanceActionsNegativeTestJSON-237435190-project-member] Acquiring lock "decbac6e-4d06-42ea-bc7b-9050ae0dba6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.353233] env[61855]: DEBUG oslo_concurrency.lockutils [None req-249c6bb1-eb3f-4fea-a632-1bd9ac3d0466 tempest-InstanceActionsNegativeTestJSON-237435190 tempest-InstanceActionsNegativeTestJSON-237435190-project-member] Lock "decbac6e-4d06-42ea-bc7b-9050ae0dba6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.711239] env[61855]: DEBUG oslo_concurrency.lockutils [None req-be99f7f4-95ed-4188-8c3b-079573579a92 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "2dc6df74-b8c5-472f-9c02-d44a549e8aea" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.711239] env[61855]: DEBUG oslo_concurrency.lockutils [None req-be99f7f4-95ed-4188-8c3b-079573579a92 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "2dc6df74-b8c5-472f-9c02-d44a549e8aea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.575048] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b35e2170-4470-4975-8b60-8afd1663e390 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "f6e20ee2-94f3-4e24-a14d-1ba5eab45823" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855)
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.575314] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b35e2170-4470-4975-8b60-8afd1663e390 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "f6e20ee2-94f3-4e24-a14d-1ba5eab45823" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.942035] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d62fc02-c91b-49df-837d-acb51127fe2b tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "b38fa534-0d62-40ce-ae69-9275ffe839e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.942314] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d62fc02-c91b-49df-837d-acb51127fe2b tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "b38fa534-0d62-40ce-ae69-9275ffe839e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.919032] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.928100] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.925060] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.925356] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.937929] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.938596] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.938596] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released"
by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.938596] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1001.939686] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57c7709-5b12-4ca2-aed8-6167ca7293a9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.948916] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00080001-362b-41f2-a077-27d284e098d5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.963037] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9cd75c-cd55-4b0e-8b49-cee1c1bb9ccc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.969332] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f210342c-e8ec-4608-a429-04aab01c6abc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.998458] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180617MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1001.998670] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.998789] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.084766] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 311d492c-0093-4d64-a56f-80fce95b809a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.084937] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6c15201d-7373-4040-9256-84ff11fcfed2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085421] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085421] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085421] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085421] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085581] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085704] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085783] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.085871] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.098193] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.112431] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b693f06d-14c8-49f7-8870-8b440908de74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.126324] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f3215717-1220-47a1-be3d-d1d5efcac656 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.137025] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 89517dc6-96e8-4e89-aa1c-cdd43e340551 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.147064] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 13e65b84-f5e2-4352-94ec-b37d0803e279 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.157650] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 21d8fcb1-84bc-4d93-8a17-24230e0ee8cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.169804] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance faaec6a7-01f9-4f9b-992c-8c86a007b6aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.179966] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8767138c-2cc2-49ae-9f35-c433dfedcb45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.192401] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.202301] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance dbddb131-724e-44f0-ad59-9c7c1f6e4889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.211787] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2c085a5c-d229-42e2-9155-ad5647110e07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.225372] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.235433] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 95bdfda9-d381-4a0f-bfde-57b423ff19c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.246171] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ddb1501a-9afc-4916-ab4e-97b851b0f931 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.257228] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a0d8f45a-5b83-425a-b8ac-1d507a441bba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.267048] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.277987] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.287727] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.297436] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.309407] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 02f2ee69-9ecf-4176-943e-06cdf255c92d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.319734] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 54a8ce25-2900-44a4-9985-b70514fcc9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.330115] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance decbac6e-4d06-42ea-bc7b-9050ae0dba6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.339989] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2dc6df74-b8c5-472f-9c02-d44a549e8aea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.350204] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f6e20ee2-94f3-4e24-a14d-1ba5eab45823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.360272] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b38fa534-0d62-40ce-ae69-9275ffe839e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.360507] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1002.360657] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1002.740488] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949db72d-7530-48dd-9ec7-9f3d5fc47c5e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.747859] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061fb8b4-5383-4df8-a838-4f6873d4ea59 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.778570] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd89869-fad6-44a8-9cb6-cb4e0db9dfed {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.785489] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1786a145-de74-4fdc-b2ec-b30d35082b9b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.798207] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.807446] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.825100] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1002.825315] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.826s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.820282] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.843651] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.843839] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1003.843962] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1003.864601] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.864776] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.864911] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865053] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865195] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865315] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865434] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865556] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865677] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865795] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1003.865925] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1003.866405] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.866590] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.866751] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1003.924532] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1004.924271] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.315744] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.733218] env[61855]: WARNING oslo_vmware.rw_handles [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1009.733218] env[61855]: ERROR
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1009.733218] env[61855]: ERROR oslo_vmware.rw_handles [ 1009.733991] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1009.735728] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1009.736059] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Copying Virtual Disk [datastore2] vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/73389718-d737-4d03-b1c1-96ae84a007a6/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1009.736363] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db6b5ed9-be3f-4ace-bac3-dda8ea5d54e5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.745495] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Waiting for the task: (returnval){ [ 1009.745495] env[61855]: value = "task-4302862" [ 1009.745495] env[61855]: _type = "Task" [ 1009.745495] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.753069] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Task: {'id': task-4302862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.254941] env[61855]: DEBUG oslo_vmware.exceptions [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1010.255249] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.255804] env[61855]: ERROR nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1010.255804] env[61855]: Faults: ['InvalidArgument'] [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Traceback (most recent call last): [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] yield resources [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] self.driver.spawn(context, instance, image_meta, [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] self._fetch_image_if_missing(context, vi) [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1010.255804] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] image_cache(vi, tmp_image_ds_loc) [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] vm_util.copy_virtual_disk( [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] session._wait_for_task(vmdk_copy_task) [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] return self.wait_for_task(task_ref) [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] return evt.wait() [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] result = hub.switch() [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] return self.greenlet.switch() [ 1010.256290] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1010.256615] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] self.f(*self.args, **self.kw) [ 1010.256615] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1010.256615] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] raise exceptions.translate_fault(task_info.error) [ 1010.256615] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1010.256615] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Faults: ['InvalidArgument'] [ 1010.256615] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] [ 1010.256615] env[61855]: INFO nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Terminating instance [ 1010.257813] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.258026] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.258258] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b216bd8-d49e-464d-8a8d-ce9ec60b596b {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.260366] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1010.260631] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1010.261368] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ce2f02-f00e-4b11-a2a1-4b76080703b9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.268476] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1010.268697] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12c0869a-317b-49a6-94ec-8edca7c7db0a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.270790] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.270971] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1010.272047] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-544ace71-2320-49b7-a368-9279cb6affe0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.276576] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Waiting for the task: (returnval){ [ 1010.276576] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52572209-62e0-623b-cbc4-54a22aec7911" [ 1010.276576] env[61855]: _type = "Task" [ 1010.276576] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.283714] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52572209-62e0-623b-cbc4-54a22aec7911, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.330606] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1010.330826] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1010.331019] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Deleting the datastore file [datastore2] 311d492c-0093-4d64-a56f-80fce95b809a {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.331294] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8492afa9-c215-4085-a2cb-6bcd3c4c4d28 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.338013] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Waiting for the task: (returnval){ [ 1010.338013] env[61855]: value = "task-4302864" [ 1010.338013] env[61855]: _type = "Task" [ 1010.338013] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.345244] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Task: {'id': task-4302864, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.787495] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1010.787820] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Creating directory with path [datastore2] vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.788028] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e03f1cb7-9323-464f-8198-18171a86540e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.799180] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Created directory with path [datastore2] vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.799385] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Fetch image to [datastore2] vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1010.799557] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1010.800318] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cfef70-d967-4aae-a576-4ae55d5ddb4b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.807403] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aff69c0-f216-49cf-99b3-9e5a81dbfca5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.817779] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ad64fa-ca34-4fbf-95d2-dd93728c9761 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.860970] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ec3f10-26c0-4ab9-ad9c-a6a9652133e2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.868075] env[61855]: DEBUG oslo_vmware.api [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Task: {'id': task-4302864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07562} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.869561] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.869711] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1010.869946] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1010.870059] env[61855]: INFO nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Took 0.61 seconds to destroy the instance on the hypervisor. 
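The wait_for_task / _poll_task records above (task-4302862 polled at 0%, task-4302864 completing with duration_secs 0.07562) all follow the same poll-until-terminal pattern. Below is a minimal sketch of that loop in plain Python; get_task_info, TaskError and the returned dict shape are hypothetical stand-ins, not the oslo.vmware API.

import time


class TaskError(Exception):
    """Raised when the remote task finishes in an error state."""


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a remote task until it reports success, error, or timeout.

    get_task_info(task_id) is assumed to return a dict such as
    {'state': 'running', 'progress': 0},
    {'state': 'success', 'duration_secs': 0.07562}, or
    {'state': 'error', 'fault': 'InvalidArgument'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            # Corresponds to the "completed successfully" records above.
            return info
        if info['state'] == 'error':
            # Corresponds to the traceback above, where _poll_task raises
            # a translated fault (here: VimFaultException / InvalidArgument).
            raise TaskError(info.get('fault', 'unknown fault'))
        # The repeated "progress is 0%." records correspond to this branch.
        time.sleep(interval)
    raise TaskError('timed out waiting for task %s' % task_id)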
[ 1010.871869] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4f70f039-5168-47ce-ace4-261e26dfcc08 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.873744] env[61855]: DEBUG nova.compute.claims [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1010.873925] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.874152] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.897037] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1010.954996] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1011.014538] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1011.014766] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1011.379479] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad257cf-91d7-434a-a737-b7ef0add70f7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.387472] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9652149-86da-4337-80f4-2ec318902254 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.416881] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c827db4-1373-4153-bc85-a8b2a3f71462 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.424040] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcce3e68-d1ed-415e-b578-c9c81787b3cb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.437011] env[61855]: DEBUG nova.compute.provider_tree [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.446131] env[61855]: DEBUG nova.scheduler.client.report [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1011.460409] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.586s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.460979] env[61855]: ERROR nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1011.460979] env[61855]: Faults: ['InvalidArgument'] [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Traceback (most recent call last): [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 
311d492c-0093-4d64-a56f-80fce95b809a] self.driver.spawn(context, instance, image_meta, [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] self._fetch_image_if_missing(context, vi) [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] image_cache(vi, tmp_image_ds_loc) [ 1011.460979] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] vm_util.copy_virtual_disk( [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] session._wait_for_task(vmdk_copy_task) [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] return self.wait_for_task(task_ref) [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] return evt.wait() [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] result = hub.switch() [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] return self.greenlet.switch() [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1011.461421] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] self.f(*self.args, **self.kw) [ 1011.461753] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1011.461753] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] raise exceptions.translate_fault(task_info.error) [ 1011.461753] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1011.461753] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Faults: ['InvalidArgument'] [ 1011.461753] env[61855]: ERROR nova.compute.manager [instance: 311d492c-0093-4d64-a56f-80fce95b809a] [ 1011.461753] env[61855]: DEBUG nova.compute.utils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1011.463418] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Build of instance 311d492c-0093-4d64-a56f-80fce95b809a was re-scheduled: A specified parameter was not correct: fileType [ 1011.463418] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1011.463825] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1011.464019] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1011.464212] env[61855]: DEBUG nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1011.464417] env[61855]: DEBUG nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1011.843187] env[61855]: DEBUG nova.network.neutron [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.854375] env[61855]: INFO nova.compute.manager [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Took 0.39 seconds to deallocate network for instance. [ 1011.957698] env[61855]: INFO nova.scheduler.client.report [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Deleted allocations for instance 311d492c-0093-4d64-a56f-80fce95b809a [ 1011.980671] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8a68408a-0bcc-4664-8240-da017402ce7e tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "311d492c-0093-4d64-a56f-80fce95b809a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 430.540s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.981835] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "311d492c-0093-4d64-a56f-80fce95b809a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 232.736s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.982092] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Acquiring lock "311d492c-0093-4d64-a56f-80fce95b809a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.982313] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "311d492c-0093-4d64-a56f-80fce95b809a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.982498] env[61855]: 
DEBUG oslo_concurrency.lockutils [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "311d492c-0093-4d64-a56f-80fce95b809a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.984550] env[61855]: INFO nova.compute.manager [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Terminating instance [ 1011.989792] env[61855]: DEBUG nova.compute.manager [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1011.990034] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1011.990316] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73f50cb8-36b3-4dab-ab3f-c6d4584bcc00 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.001573] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8167ed55-a7af-4d02-b766-d226ea1944f7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.012181] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1012.032655] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 311d492c-0093-4d64-a56f-80fce95b809a could not be found. [ 1012.032874] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1012.033066] env[61855]: INFO nova.compute.manager [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Took 0.04 seconds to destroy the instance on the hypervisor. 
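The lockutils records around this terminate path show a consistent discipline: a per-instance lock named after the UUID guards do_terminate_instance, a separate "-events" lock guards the event map, and every release logs how long the lock was waited for and held. A hedged sketch of that pattern using plain threading primitives follows; the real code uses oslo_concurrency.lockutils, and locked_do and _locks here are illustrative names.

import threading
import time
from collections import defaultdict

# One named lock per resource, e.g. an instance UUID or "compute_resources".
_locks = defaultdict(threading.Lock)


def locked_do(name, func, *args, **kwargs):
    """Run func under the named lock, logging wait and hold times."""
    lock = _locks[name]
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        t1 = time.monotonic()
        try:
            return func(*args, **kwargs)
        finally:
            held = time.monotonic() - t1
            print('Lock "%s" released :: held %.3fs' % (name, held))

The long waits logged above (held 430.540s on the build lock, waited 232.736s on the terminate lock) are exactly this serialization: terminate_instance queued behind the still-running build of the same instance.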
[ 1012.033320] env[61855]: DEBUG oslo.service.loopingcall [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.033547] env[61855]: DEBUG nova.compute.manager [-] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1012.033645] env[61855]: DEBUG nova.network.neutron [-] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1012.063019] env[61855]: DEBUG nova.network.neutron [-] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.065108] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.065108] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.066757] env[61855]: INFO nova.compute.claims [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1012.071907] env[61855]: INFO nova.compute.manager [-] [instance: 311d492c-0093-4d64-a56f-80fce95b809a] Took 0.04 seconds to deallocate network for instance. 
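The claim for instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff succeeds against the inventory dict the scheduler report client prints nearby: usable capacity per resource class is (total - reserved) * allocation_ratio, so VCPU capacity here is 48 * 4.0 = 192. An illustrative check under that assumption; this is a sketch, not the ResourceTracker implementation.

# Inventory values copied from the report-client records in this log.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
}


def capacity(inv):
    """Usable capacity per resource class: (total - reserved) * ratio."""
    return {r: (v['total'] - v['reserved']) * v['allocation_ratio']
            for r, v in inv.items()}


def can_claim(requested, used, inv=INVENTORY):
    """True if `requested` fits on top of `used` for every resource class."""
    cap = capacity(inv)
    return all(used.get(r, 0) + amount <= cap[r]
               for r, amount in requested.items())


# The m1.nano flavor logged further below requests 1 VCPU, 128 MB, 1 GB.
print(can_claim({'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}, used={}))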
[ 1012.219234] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7cd213ba-3858-4542-8866-fcc1d95b85c3 tempest-ServersTestJSON-939054605 tempest-ServersTestJSON-939054605-project-member] Lock "311d492c-0093-4d64-a56f-80fce95b809a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.237s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.532442] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90850d3f-dfa9-40ee-b088-45481b06182e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.541022] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ba02d9-016e-44b4-ad5a-0ac0c245c5c0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.571048] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e14e575-4fa6-4a5b-beb9-914aec9ebf41 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.578320] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586b536b-aaeb-4d5a-b9ab-362776c03015 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.591320] env[61855]: DEBUG nova.compute.provider_tree [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.600532] env[61855]: DEBUG nova.scheduler.client.report [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1012.616587] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.551s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.616759] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1012.655209] env[61855]: DEBUG nova.compute.utils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1012.656313] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1012.656603] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1012.672370] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1012.723853] env[61855]: DEBUG nova.policy [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c35183dd37854f9eb4928c3ae33224ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f4ecf66e779420bbb734b4710723191', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1012.741281] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1012.769225] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1012.769515] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1012.769710] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.769910] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1012.770104] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.770240] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1012.770446] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1012.770606] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1012.770773] env[61855]: DEBUG 
nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1012.770938] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1012.771126] env[61855]: DEBUG nova.virt.hardware [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1012.772304] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddda7b6-a105-4eef-a171-02ca2c1691b5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.780398] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a7977a-ea46-4cec-8428-67f9ed1843e6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.213862] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Successfully created port: 62e79c60-f4d8-4472-9d45-2f2d0e1de4bf {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1014.215721] env[61855]: DEBUG nova.compute.manager [req-f7c394ac-5866-43ea-9c6c-06cb0827e9db req-2617c28c-e7d7-40e4-96b5-acde808d991b service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Received event network-vif-plugged-62e79c60-f4d8-4472-9d45-2f2d0e1de4bf {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1014.215899] env[61855]: DEBUG oslo_concurrency.lockutils [req-f7c394ac-5866-43ea-9c6c-06cb0827e9db req-2617c28c-e7d7-40e4-96b5-acde808d991b service nova] Acquiring lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.216161] env[61855]: DEBUG oslo_concurrency.lockutils [req-f7c394ac-5866-43ea-9c6c-06cb0827e9db req-2617c28c-e7d7-40e4-96b5-acde808d991b service nova] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.216343] env[61855]: DEBUG oslo_concurrency.lockutils [req-f7c394ac-5866-43ea-9c6c-06cb0827e9db req-2617c28c-e7d7-40e4-96b5-acde808d991b service nova] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.216517] env[61855]: DEBUG 
nova.compute.manager [req-f7c394ac-5866-43ea-9c6c-06cb0827e9db req-2617c28c-e7d7-40e4-96b5-acde808d991b service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] No waiting events found dispatching network-vif-plugged-62e79c60-f4d8-4472-9d45-2f2d0e1de4bf {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1014.216671] env[61855]: WARNING nova.compute.manager [req-f7c394ac-5866-43ea-9c6c-06cb0827e9db req-2617c28c-e7d7-40e4-96b5-acde808d991b service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Received unexpected event network-vif-plugged-62e79c60-f4d8-4472-9d45-2f2d0e1de4bf for instance with vm_state building and task_state spawning. [ 1014.319298] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Successfully updated port: 62e79c60-f4d8-4472-9d45-2f2d0e1de4bf {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.331815] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "refresh_cache-1f2b9ec1-5449-45a9-9691-857b15aaa9ff" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.331996] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquired lock "refresh_cache-1f2b9ec1-5449-45a9-9691-857b15aaa9ff" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.332124] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1014.385780] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1014.645164] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Updating instance_info_cache with network_info: [{"id": "62e79c60-f4d8-4472-9d45-2f2d0e1de4bf", "address": "fa:16:3e:1e:b7:72", "network": {"id": "f1dd8648-9288-450f-8c29-d2ed72381373", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1195883059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f4ecf66e779420bbb734b4710723191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62e79c60-f4", "ovs_interfaceid": "62e79c60-f4d8-4472-9d45-2f2d0e1de4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.659858] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Releasing lock "refresh_cache-1f2b9ec1-5449-45a9-9691-857b15aaa9ff" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.660240] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Instance network_info: |[{"id": "62e79c60-f4d8-4472-9d45-2f2d0e1de4bf", "address": "fa:16:3e:1e:b7:72", "network": {"id": "f1dd8648-9288-450f-8c29-d2ed72381373", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1195883059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f4ecf66e779420bbb734b4710723191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62e79c60-f4", "ovs_interfaceid": "62e79c60-f4d8-4472-9d45-2f2d0e1de4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1014.660684] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:b7:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62e79c60-f4d8-4472-9d45-2f2d0e1de4bf', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.668504] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Creating folder: Project (2f4ecf66e779420bbb734b4710723191). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1014.669046] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-137b8d12-b53b-4193-a6b1-50bc54aedbe1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.679457] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Created folder: Project (2f4ecf66e779420bbb734b4710723191) in parent group-v847048. [ 1014.679654] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Creating folder: Instances. Parent ref: group-v847107. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1014.679949] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8a1d185-317b-4954-9525-6a45953c20ef {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.688766] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Created folder: Instances in parent group-v847107. [ 1014.689081] env[61855]: DEBUG oslo.service.loopingcall [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.689232] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1014.689433] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-011d4197-408c-4f62-a397-b2781ae18598 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.708549] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.708549] env[61855]: value = "task-4302867" [ 1014.708549] env[61855]: _type = "Task" [ 1014.708549] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.716284] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302867, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.220119] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302867, 'name': CreateVM_Task, 'duration_secs': 0.281369} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.220342] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1015.220961] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.221132] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.221453] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1015.222042] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f1c347b-a506-4d9b-89d7-cf33be6b8b56 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.226561] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Waiting for the task: (returnval){ [ 1015.226561] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5230cef6-56f7-47a3-2e52-c52074b374d8" [ 1015.226561] env[61855]: _type = "Task" [ 1015.226561] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.234607] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5230cef6-56f7-47a3-2e52-c52074b374d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.738031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.738031] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.738031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.257350] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.313594] env[61855]: DEBUG nova.compute.manager [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Received event network-changed-62e79c60-f4d8-4472-9d45-2f2d0e1de4bf {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1016.313594] env[61855]: DEBUG nova.compute.manager [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Refreshing instance network info cache due to event network-changed-62e79c60-f4d8-4472-9d45-2f2d0e1de4bf. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1016.313594] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] Acquiring lock "refresh_cache-1f2b9ec1-5449-45a9-9691-857b15aaa9ff" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.313719] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] Acquired lock "refresh_cache-1f2b9ec1-5449-45a9-9691-857b15aaa9ff" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.313889] env[61855]: DEBUG nova.network.neutron [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Refreshing network info cache for port 62e79c60-f4d8-4472-9d45-2f2d0e1de4bf {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1016.659061] env[61855]: DEBUG nova.network.neutron [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Updated VIF entry in instance network info cache for port 62e79c60-f4d8-4472-9d45-2f2d0e1de4bf. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1016.659438] env[61855]: DEBUG nova.network.neutron [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Updating instance_info_cache with network_info: [{"id": "62e79c60-f4d8-4472-9d45-2f2d0e1de4bf", "address": "fa:16:3e:1e:b7:72", "network": {"id": "f1dd8648-9288-450f-8c29-d2ed72381373", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1195883059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f4ecf66e779420bbb734b4710723191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62e79c60-f4", "ovs_interfaceid": "62e79c60-f4d8-4472-9d45-2f2d0e1de4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.671907] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee2b68f5-5967-4d65-b7e4-ce5ef12a96d2 req-94012a94-ec58-4ddc-8752-53c4a36d7d44 service nova] Releasing lock "refresh_cache-1f2b9ec1-5449-45a9-9691-857b15aaa9ff" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.754221] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] 
Acquiring lock "6fbd649d-1fce-440f-9911-09b74df51489" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.754221] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "6fbd649d-1fce-440f-9911-09b74df51489" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.643729] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83f8252d-ef66-4a0e-8cbd-10aa78d1186a tempest-ImagesNegativeTestJSON-297444898 tempest-ImagesNegativeTestJSON-297444898-project-member] Acquiring lock "c9608176-75bf-418f-b91d-79c6d997f543" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.644122] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83f8252d-ef66-4a0e-8cbd-10aa78d1186a tempest-ImagesNegativeTestJSON-297444898 tempest-ImagesNegativeTestJSON-297444898-project-member] Lock "c9608176-75bf-418f-b91d-79c6d997f543" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.015772] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96727970-d7e7-4a20-80c3-04d142e6b0b3 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] Acquiring lock "78755b45-1bb8-4a3b-9c51-7408425a561f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.016130] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96727970-d7e7-4a20-80c3-04d142e6b0b3 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] Lock "78755b45-1bb8-4a3b-9c51-7408425a561f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.317266] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6bae17bc-d807-49c0-8a15-4becd8143752 tempest-ServerActionsTestOtherA-91012384 tempest-ServerActionsTestOtherA-91012384-project-member] Acquiring lock "0ff5c9d8-b95d-4127-95eb-fece90efe346" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.317599] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6bae17bc-d807-49c0-8a15-4becd8143752 tempest-ServerActionsTestOtherA-91012384 tempest-ServerActionsTestOtherA-91012384-project-member] Lock "0ff5c9d8-b95d-4127-95eb-fece90efe346" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.180367] env[61855]: DEBUG oslo_concurrency.lockutils [None req-192cffde-4429-4c87-aecc-f433da36fff0 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Acquiring lock "fe3646dd-a2ac-4ccb-9761-7c9b95be690e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.180658] env[61855]: DEBUG oslo_concurrency.lockutils [None req-192cffde-4429-4c87-aecc-f433da36fff0 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "fe3646dd-a2ac-4ccb-9761-7c9b95be690e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.553930] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aed3b3bf-47cf-4333-b559-ef9e41e54c81 tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] Acquiring lock "44f7b0f2-7585-4def-98d2-e34039db404c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.553930] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aed3b3bf-47cf-4333-b559-ef9e41e54c81 tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] Lock "44f7b0f2-7585-4def-98d2-e34039db404c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.344211] env[61855]: WARNING oslo_vmware.rw_handles [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1060.344211] env[61855]: ERROR oslo_vmware.rw_handles [ 1060.344806] env[61855]: DEBUG nova.virt.vmwareapi.images [None 
req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1060.351295] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1060.351295] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Copying Virtual Disk [datastore2] vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/e9e217be-e945-49e6-97db-8657c5fde5f8/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1060.351295] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0079166-7d1a-494d-bbc1-b0e9b4fb5b7f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.359197] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Waiting for the task: (returnval){ [ 1060.359197] env[61855]: value = "task-4302868" [ 1060.359197] env[61855]: _type = "Task" [ 1060.359197] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.369889] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Task: {'id': task-4302868, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.392986] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquiring lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.392986] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.869395] env[61855]: DEBUG oslo_vmware.exceptions [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1060.869690] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.870268] env[61855]: ERROR nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1060.870268] env[61855]: Faults: ['InvalidArgument'] [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Traceback (most recent call last): [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] yield resources [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self.driver.spawn(context, instance, image_meta, [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self._fetch_image_if_missing(context, vi) [ 1060.870268] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] image_cache(vi, tmp_image_ds_loc) [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] vm_util.copy_virtual_disk( [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] session._wait_for_task(vmdk_copy_task) [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] return self.wait_for_task(task_ref) [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] return evt.wait() [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] result = hub.switch() [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1060.870654] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] return self.greenlet.switch() [ 1060.871047] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1060.871047] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self.f(*self.args, **self.kw) [ 1060.871047] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1060.871047] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] raise exceptions.translate_fault(task_info.error) [ 1060.871047] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1060.871047] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Faults: ['InvalidArgument'] [ 1060.871047] 
env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] [ 1060.871047] env[61855]: INFO nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Terminating instance [ 1060.872273] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.872488] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.873221] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1060.873475] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1060.873738] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aec709e5-a525-4fbd-8255-75fe0c959728 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.877665] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd43f6e-3228-42d3-9940-b473f7974db4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.884139] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1060.884418] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47857178-1d4f-441f-9784-12a990545b97 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.886583] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.886801] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1060.887754] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a5c3379-c4d9-4054-bcca-d9dc5aa47544 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.892483] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Waiting for the task: (returnval){ [ 1060.892483] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524c2fa2-9a95-43fa-33f2-34b727b54047" [ 1060.892483] env[61855]: _type = "Task" [ 1060.892483] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.900063] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524c2fa2-9a95-43fa-33f2-34b727b54047, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.918973] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.967020] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1060.967020] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1060.967020] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Deleting the datastore file [datastore2] 6c15201d-7373-4040-9256-84ff11fcfed2 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1060.967020] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfac9ee8-ae7f-488d-af27-03e3ea0d69d6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.971287] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Waiting for the task: (returnval){ [ 1060.971287] 
env[61855]: value = "task-4302870" [ 1060.971287] env[61855]: _type = "Task" [ 1060.971287] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.979396] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Task: {'id': task-4302870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.403604] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1061.403871] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Creating directory with path [datastore2] vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.404131] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37dfd317-fee9-4a0b-8bd5-1642cc2b3bd0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.415357] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Created directory with path [datastore2] vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.415587] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Fetch image to [datastore2] vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1061.415943] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1061.416578] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0007ca-0c20-4b1f-9839-b9541aaa746b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.423627] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01044da-edbf-4f3d-a699-fd8609dba947 {{(pid=61855) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.433411] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b56cbc-a3bb-4939-b667-9b89f52d8940 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.464294] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a014b7e-7b45-48e9-8217-b09fe5c6bd57 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.470270] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dd04b243-9de1-43f4-87bd-d1dcf5c76398 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.480490] env[61855]: DEBUG oslo_vmware.api [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Task: {'id': task-4302870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077292} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.480725] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.480908] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1061.481447] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1061.481666] env[61855]: INFO nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Took 0.61 seconds to destroy the instance on the hypervisor. 
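The DeleteDatastoreFile_Task above follows the same oslo.vmware pattern as every vCenter task in this log (CreateVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task): invoke the *_Task SOAP method, then poll the returned task object until it completes or raises, which is what produces the "Waiting for the task ... progress is 0%" lines. A minimal sketch of that pattern, assuming a reachable vCenter; the host name, credentials, and datastore path are illustrative placeholders, not values from this deployment:

```python
# Hypothetical endpoint and credentials; only the call pattern mirrors the log.
from oslo_vmware import api
from oslo_vmware import exceptions as vexc

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    10,    # api_retry_count
    0.5)   # task_poll_interval: seconds between the "progress is N%" polls

# invoke_api() issues the SOAP call and returns a task moref;
# wait_for_task() polls it and raises on a task-level fault.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] example-dir/example.vmdk')
# Placeholder path; Nova's ds_util.file_delete additionally passes
# datacenter=<Datacenter moref> on vCenter deployments.
try:
    session.wait_for_task(task)
except vexc.VimFaultException as e:
    # Faults such as InvalidArgument surface here, exactly as with the
    # CopyVirtualDisk_Task failure earlier in this section.
    print(e.fault_list, e.msg)
```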
[ 1061.483816] env[61855]: DEBUG nova.compute.claims [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1061.483987] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.484273] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.496252] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1061.559733] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1061.622141] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1061.622141] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1061.900832] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7b37b5-0a6b-4dbc-9b7c-24b2028bd083 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.909281] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920674a4-3e7c-402f-bb65-8348e8b0ae8d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.938382] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618355c9-da69-4f06-9974-4c51c63c8080 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.945264] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe9598d-ebaf-4e81-aa4f-33ae3b0d8a73 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.957641] env[61855]: DEBUG nova.compute.provider_tree [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.966351] env[61855]: DEBUG nova.scheduler.client.report [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1061.984161] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.500s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.984724] env[61855]: ERROR nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1061.984724] env[61855]: Faults: ['InvalidArgument'] [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Traceback (most recent call last): [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/compute/manager.py", line 
2633, in _build_and_run_instance [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self.driver.spawn(context, instance, image_meta, [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self._fetch_image_if_missing(context, vi) [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] image_cache(vi, tmp_image_ds_loc) [ 1061.984724] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] vm_util.copy_virtual_disk( [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] session._wait_for_task(vmdk_copy_task) [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] return self.wait_for_task(task_ref) [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] return evt.wait() [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] result = hub.switch() [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] return self.greenlet.switch() [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1061.985106] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] self.f(*self.args, **self.kw) [ 1061.985476] env[61855]: ERROR nova.compute.manager [instance: 
6c15201d-7373-4040-9256-84ff11fcfed2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1061.985476] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] raise exceptions.translate_fault(task_info.error) [ 1061.985476] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1061.985476] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Faults: ['InvalidArgument'] [ 1061.985476] env[61855]: ERROR nova.compute.manager [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] [ 1061.985476] env[61855]: DEBUG nova.compute.utils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1061.986825] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Build of instance 6c15201d-7373-4040-9256-84ff11fcfed2 was re-scheduled: A specified parameter was not correct: fileType [ 1061.986825] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1061.987252] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1061.987434] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1061.987628] env[61855]: DEBUG nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1061.987796] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1062.390744] env[61855]: DEBUG nova.network.neutron [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.404449] env[61855]: INFO nova.compute.manager [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Took 0.42 seconds to deallocate network for instance. [ 1062.505243] env[61855]: INFO nova.scheduler.client.report [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Deleted allocations for instance 6c15201d-7373-4040-9256-84ff11fcfed2 [ 1062.528701] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8e1f3d9b-018a-4fb8-83a1-e3c50e617093 tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "6c15201d-7373-4040-9256-84ff11fcfed2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 474.873s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.529874] env[61855]: DEBUG oslo_concurrency.lockutils [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "6c15201d-7373-4040-9256-84ff11fcfed2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 275.969s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.530118] env[61855]: DEBUG oslo_concurrency.lockutils [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Acquiring lock "6c15201d-7373-4040-9256-84ff11fcfed2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.530342] env[61855]: DEBUG oslo_concurrency.lockutils [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] 
Lock "6c15201d-7373-4040-9256-84ff11fcfed2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.530533] env[61855]: DEBUG oslo_concurrency.lockutils [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "6c15201d-7373-4040-9256-84ff11fcfed2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.533025] env[61855]: INFO nova.compute.manager [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Terminating instance [ 1062.534612] env[61855]: DEBUG nova.compute.manager [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1062.534807] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1062.535297] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7135fbc3-ae89-49ba-88ed-dac3ad8fcb07 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.546362] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135e8cb8-9880-439a-9f28-f9e86646022d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.576985] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6c15201d-7373-4040-9256-84ff11fcfed2 could not be found. [ 1062.577213] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1062.577396] env[61855]: INFO nova.compute.manager [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1062.577647] env[61855]: DEBUG oslo.service.loopingcall [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.577996] env[61855]: DEBUG nova.compute.manager [None req-5fa6118b-e4c2-415e-9fcc-57fe7e76f571 tempest-ServerRescueTestJSON-1125875554 tempest-ServerRescueTestJSON-1125875554-project-member] [instance: b693f06d-14c8-49f7-8870-8b440908de74] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1062.580338] env[61855]: DEBUG nova.compute.manager [-] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1062.580446] env[61855]: DEBUG nova.network.neutron [-] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1062.605108] env[61855]: DEBUG nova.compute.manager [None req-5fa6118b-e4c2-415e-9fcc-57fe7e76f571 tempest-ServerRescueTestJSON-1125875554 tempest-ServerRescueTestJSON-1125875554-project-member] [instance: b693f06d-14c8-49f7-8870-8b440908de74] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1062.608667] env[61855]: DEBUG nova.network.neutron [-] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.620519] env[61855]: INFO nova.compute.manager [-] [instance: 6c15201d-7373-4040-9256-84ff11fcfed2] Took 0.04 seconds to deallocate network for instance. [ 1062.631872] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5fa6118b-e4c2-415e-9fcc-57fe7e76f571 tempest-ServerRescueTestJSON-1125875554 tempest-ServerRescueTestJSON-1125875554-project-member] Lock "b693f06d-14c8-49f7-8870-8b440908de74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.463s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.642183] env[61855]: DEBUG nova.compute.manager [None req-fd1649f1-f795-4e80-8aa2-5964048fb0f7 tempest-TenantUsagesTestJSON-110961510 tempest-TenantUsagesTestJSON-110961510-project-member] [instance: f3215717-1220-47a1-be3d-d1d5efcac656] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1062.673101] env[61855]: DEBUG nova.compute.manager [None req-fd1649f1-f795-4e80-8aa2-5964048fb0f7 tempest-TenantUsagesTestJSON-110961510 tempest-TenantUsagesTestJSON-110961510-project-member] [instance: f3215717-1220-47a1-be3d-d1d5efcac656] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1062.695358] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fd1649f1-f795-4e80-8aa2-5964048fb0f7 tempest-TenantUsagesTestJSON-110961510 tempest-TenantUsagesTestJSON-110961510-project-member] Lock "f3215717-1220-47a1-be3d-d1d5efcac656" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.140s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.707319] env[61855]: DEBUG nova.compute.manager [None req-c3071069-b926-425d-a765-226b6a0d43b0 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] [instance: 89517dc6-96e8-4e89-aa1c-cdd43e340551] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1062.732255] env[61855]: DEBUG oslo_concurrency.lockutils [None req-faef15bc-dd8c-49f6-8b26-cdf025ab4c8d tempest-FloatingIPsAssociationTestJSON-1345838528 tempest-FloatingIPsAssociationTestJSON-1345838528-project-member] Lock "6c15201d-7373-4040-9256-84ff11fcfed2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.202s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.736523] env[61855]: DEBUG nova.compute.manager [None req-c3071069-b926-425d-a765-226b6a0d43b0 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] [instance: 89517dc6-96e8-4e89-aa1c-cdd43e340551] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1062.771643] env[61855]: DEBUG oslo_concurrency.lockutils [None req-c3071069-b926-425d-a765-226b6a0d43b0 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] Lock "89517dc6-96e8-4e89-aa1c-cdd43e340551" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.264s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.781682] env[61855]: DEBUG nova.compute.manager [None req-2f116cda-594f-4b47-9f5f-b932f768a2a8 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] [instance: 13e65b84-f5e2-4352-94ec-b37d0803e279] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1062.806879] env[61855]: DEBUG nova.compute.manager [None req-2f116cda-594f-4b47-9f5f-b932f768a2a8 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] [instance: 13e65b84-f5e2-4352-94ec-b37d0803e279] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1062.827945] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2f116cda-594f-4b47-9f5f-b932f768a2a8 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Lock "13e65b84-f5e2-4352-94ec-b37d0803e279" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.296s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.838311] env[61855]: DEBUG nova.compute.manager [None req-a1df8665-6c7b-4843-96dc-273cf3a4a48a tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] [instance: 21d8fcb1-84bc-4d93-8a17-24230e0ee8cb] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1062.862035] env[61855]: DEBUG nova.compute.manager [None req-a1df8665-6c7b-4843-96dc-273cf3a4a48a tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] [instance: 21d8fcb1-84bc-4d93-8a17-24230e0ee8cb] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1062.882627] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a1df8665-6c7b-4843-96dc-273cf3a4a48a tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Lock "21d8fcb1-84bc-4d93-8a17-24230e0ee8cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.305s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.892671] env[61855]: DEBUG nova.compute.manager [None req-29d03a1f-0ec9-4b2f-bce2-8bc90979bcf4 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] [instance: faaec6a7-01f9-4f9b-992c-8c86a007b6aa] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1062.916829] env[61855]: DEBUG nova.compute.manager [None req-29d03a1f-0ec9-4b2f-bce2-8bc90979bcf4 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] [instance: faaec6a7-01f9-4f9b-992c-8c86a007b6aa] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1062.923955] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.924186] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1062.924306] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1062.937853] env[61855]: DEBUG oslo_concurrency.lockutils [None req-29d03a1f-0ec9-4b2f-bce2-8bc90979bcf4 tempest-ListServerFiltersTestJSON-283633926 tempest-ListServerFiltersTestJSON-283633926-project-member] Lock "faaec6a7-01f9-4f9b-992c-8c86a007b6aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.662s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.943851] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.944011] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.944155] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.944307] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.944539] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.944681] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.944812] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.944923] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.945076] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1062.945207] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1062.945667] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.945859] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.946037] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.951213] env[61855]: DEBUG nova.compute.manager [None req-e7d6ed71-b85e-42de-9556-9e476166d75e tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] [instance: 8767138c-2cc2-49ae-9f35-c433dfedcb45] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1062.955692] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.955890] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.956061] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.956219] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1062.957289] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b24c1b-4673-4ae1-a460-03f233d8f21d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.965923] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9e7872-6998-4794-983d-af73c6424fe3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.979552] env[61855]: DEBUG nova.compute.manager [None req-e7d6ed71-b85e-42de-9556-9e476166d75e tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] [instance: 8767138c-2cc2-49ae-9f35-c433dfedcb45] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1062.980783] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5638f05-f97a-4c35-be03-429ef62d1f4a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.987538] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4778a8-e135-4d5e-a952-2856a4b6d6fe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.017954] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180638MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1063.018115] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.018306] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.020417] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e7d6ed71-b85e-42de-9556-9e476166d75e tempest-AttachVolumeTestJSON-982122452 tempest-AttachVolumeTestJSON-982122452-project-member] Lock "8767138c-2cc2-49ae-9f35-c433dfedcb45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.341s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.029555] env[61855]: DEBUG nova.compute.manager [None req-f8f17b7d-2f2b-4f0d-b5ae-cc513e699635 tempest-ServersListShow296Test-1045640085 tempest-ServersListShow296Test-1045640085-project-member] [instance: 6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.071347] env[61855]: DEBUG nova.compute.manager [None req-f8f17b7d-2f2b-4f0d-b5ae-cc513e699635 tempest-ServersListShow296Test-1045640085 tempest-ServersListShow296Test-1045640085-project-member] [instance: 6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1063.091630] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8f17b7d-2f2b-4f0d-b5ae-cc513e699635 tempest-ServersListShow296Test-1045640085 tempest-ServersListShow296Test-1045640085-project-member] Lock "6d6f1a5e-eb1d-4b78-9f0d-7c37541eba28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.068s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.096590] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.096745] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.096872] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.097027] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.097138] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.097262] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.097378] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.097571] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.097617] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.100421] env[61855]: DEBUG nova.compute.manager [None req-78cd50a1-c927-4ee2-863a-667efd5b48a8 tempest-ServersAaction247Test-540627758 tempest-ServersAaction247Test-540627758-project-member] [instance: dbddb131-724e-44f0-ad59-9c7c1f6e4889] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.107513] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.118680] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.126550] env[61855]: DEBUG nova.compute.manager [None req-78cd50a1-c927-4ee2-863a-667efd5b48a8 tempest-ServersAaction247Test-540627758 tempest-ServersAaction247Test-540627758-project-member] [instance: dbddb131-724e-44f0-ad59-9c7c1f6e4889] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1063.131513] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.141076] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.146440] env[61855]: DEBUG oslo_concurrency.lockutils [None req-78cd50a1-c927-4ee2-863a-667efd5b48a8 tempest-ServersAaction247Test-540627758 tempest-ServersAaction247Test-540627758-project-member] Lock "dbddb131-724e-44f0-ad59-9c7c1f6e4889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.269s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.151629] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 02f2ee69-9ecf-4176-943e-06cdf255c92d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.192739] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 54a8ce25-2900-44a4-9985-b70514fcc9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.205515] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance decbac6e-4d06-42ea-bc7b-9050ae0dba6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.211759] env[61855]: DEBUG nova.compute.manager [None req-2ab3115e-c076-40a7-916d-3014d6898a89 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: 2c085a5c-d229-42e2-9155-ad5647110e07] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.223214] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2dc6df74-b8c5-472f-9c02-d44a549e8aea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.235176] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f6e20ee2-94f3-4e24-a14d-1ba5eab45823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.241826] env[61855]: DEBUG nova.compute.manager [None req-2ab3115e-c076-40a7-916d-3014d6898a89 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: 2c085a5c-d229-42e2-9155-ad5647110e07] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1063.246157] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b38fa534-0d62-40ce-ae69-9275ffe839e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.257228] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.265591] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2ab3115e-c076-40a7-916d-3014d6898a89 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "2c085a5c-d229-42e2-9155-ad5647110e07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.709s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.267764] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c9608176-75bf-418f-b91d-79c6d997f543 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.273765] env[61855]: DEBUG nova.compute.manager [None req-64f41831-9cd0-4835-9ca6-c4370c67b52f tempest-ServersV294TestFqdnHostnames-446546208 tempest-ServersV294TestFqdnHostnames-446546208-project-member] [instance: dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.277658] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 78755b45-1bb8-4a3b-9c51-7408425a561f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.287013] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ff5c9d8-b95d-4127-95eb-fece90efe346 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.318951] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance fe3646dd-a2ac-4ccb-9761-7c9b95be690e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.321393] env[61855]: DEBUG nova.compute.manager [None req-64f41831-9cd0-4835-9ca6-c4370c67b52f tempest-ServersV294TestFqdnHostnames-446546208 tempest-ServersV294TestFqdnHostnames-446546208-project-member] [instance: dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1063.328946] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 44f7b0f2-7585-4def-98d2-e34039db404c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.340202] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.340447] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1063.341030] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=110GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1063.343773] env[61855]: DEBUG oslo_concurrency.lockutils [None req-64f41831-9cd0-4835-9ca6-c4370c67b52f tempest-ServersV294TestFqdnHostnames-446546208 tempest-ServersV294TestFqdnHostnames-446546208-project-member] Lock "dab882d6-3a3d-4a4f-bf9e-9b1848e8fd5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.705s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.356393] env[61855]: DEBUG nova.compute.manager [None req-d397419e-dda3-48f6-823b-404640b7b165 tempest-ServersTestFqdnHostnames-425384496 tempest-ServersTestFqdnHostnames-425384496-project-member] [instance: 95bdfda9-d381-4a0f-bfde-57b423ff19c7] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.381339] env[61855]: DEBUG nova.compute.manager [None req-d397419e-dda3-48f6-823b-404640b7b165 tempest-ServersTestFqdnHostnames-425384496 tempest-ServersTestFqdnHostnames-425384496-project-member] [instance: 95bdfda9-d381-4a0f-bfde-57b423ff19c7] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1063.401486] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d397419e-dda3-48f6-823b-404640b7b165 tempest-ServersTestFqdnHostnames-425384496 tempest-ServersTestFqdnHostnames-425384496-project-member] Lock "95bdfda9-d381-4a0f-bfde-57b423ff19c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.406s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.410636] env[61855]: DEBUG nova.compute.manager [None req-07888221-052d-4e40-b07d-de7f522ab20c tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] [instance: ddb1501a-9afc-4916-ab4e-97b851b0f931] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.435467] env[61855]: DEBUG nova.compute.manager [None req-07888221-052d-4e40-b07d-de7f522ab20c tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] [instance: ddb1501a-9afc-4916-ab4e-97b851b0f931] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1063.454280] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07888221-052d-4e40-b07d-de7f522ab20c tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] Lock "ddb1501a-9afc-4916-ab4e-97b851b0f931" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.243s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.463468] env[61855]: DEBUG nova.compute.manager [None req-de4f7f2b-aa3c-47e6-8367-193f7f1edac9 tempest-ServerActionsV293TestJSON-717373711 tempest-ServerActionsV293TestJSON-717373711-project-member] [instance: a0d8f45a-5b83-425a-b8ac-1d507a441bba] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.487066] env[61855]: DEBUG nova.compute.manager [None req-de4f7f2b-aa3c-47e6-8367-193f7f1edac9 tempest-ServerActionsV293TestJSON-717373711 tempest-ServerActionsV293TestJSON-717373711-project-member] [instance: a0d8f45a-5b83-425a-b8ac-1d507a441bba] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1063.510952] env[61855]: DEBUG oslo_concurrency.lockutils [None req-de4f7f2b-aa3c-47e6-8367-193f7f1edac9 tempest-ServerActionsV293TestJSON-717373711 tempest-ServerActionsV293TestJSON-717373711-project-member] Lock "a0d8f45a-5b83-425a-b8ac-1d507a441bba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.873s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.521568] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1063.575329] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.665715] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a7b20f-9742-4938-86b4-7aaf113b7cb5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.673697] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e907a94-ff3f-4174-b95e-9b0a68263253 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.704086] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e437a10a-ef6b-4936-8468-686e5f27c595 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.711665] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e01e453-4a47-471c-ba0c-2fd761f21e4b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.724788] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.734182] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1063.750423] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1063.750423] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.732s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.751169] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.176s {{(pid=61855) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.752705] env[61855]: INFO nova.compute.claims [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1064.101249] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd1642d-11b5-4696-9bf7-1e1a08133170 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.110613] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1257fb95-3f0c-4663-b7a7-d9cf32ac18dd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.140287] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13396e52-df9a-4aac-a384-96b06a87203e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.147674] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d6a626-a06c-42c9-a20c-2b1844d92749 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.162035] env[61855]: DEBUG nova.compute.provider_tree [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.172455] env[61855]: DEBUG nova.scheduler.client.report [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1064.188259] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.438s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.188860] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1064.225589] env[61855]: DEBUG nova.compute.utils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1064.226830] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1064.227008] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1064.239739] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1064.302572] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1064.308863] env[61855]: DEBUG nova.policy [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a61bfba6ab94cf08389d4b4cd35fb07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a9db53ed1fd436eb679d7b7b3ac1709', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1064.327304] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1064.327577] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1064.327739] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1064.327927] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1064.328101] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1064.328258] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1064.328471] env[61855]: DEBUG nova.virt.hardware [None 
req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1064.328636] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1064.328806] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1064.328973] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1064.329168] env[61855]: DEBUG nova.virt.hardware [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1064.330045] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8904711-9ba7-43d1-86b4-1cfaa93484a2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.337920] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e278217e-06ef-47e2-9e52-69b5dfc386c9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.637718] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Successfully created port: 01cef77b-658c-4fe7-bb12-76b6c1029da7 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1064.732983] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1064.733696] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1064.733696] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1065.574242] env[61855]: DEBUG nova.compute.manager [req-d9e041b1-4c03-423b-836a-126418d1094a req-01f6b562-5ea0-4ea9-a50f-55158ad1589e service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Received event network-vif-plugged-01cef77b-658c-4fe7-bb12-76b6c1029da7 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1065.574242] env[61855]: DEBUG oslo_concurrency.lockutils [req-d9e041b1-4c03-423b-836a-126418d1094a req-01f6b562-5ea0-4ea9-a50f-55158ad1589e service nova] Acquiring lock "adfd94b5-7e03-49d1-a445-c58b296e5185-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.574242] env[61855]: DEBUG oslo_concurrency.lockutils [req-d9e041b1-4c03-423b-836a-126418d1094a req-01f6b562-5ea0-4ea9-a50f-55158ad1589e service nova] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.574242] env[61855]: DEBUG oslo_concurrency.lockutils [req-d9e041b1-4c03-423b-836a-126418d1094a req-01f6b562-5ea0-4ea9-a50f-55158ad1589e service nova] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.574527] env[61855]: DEBUG nova.compute.manager [req-d9e041b1-4c03-423b-836a-126418d1094a req-01f6b562-5ea0-4ea9-a50f-55158ad1589e service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] No waiting events found dispatching network-vif-plugged-01cef77b-658c-4fe7-bb12-76b6c1029da7 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1065.574527] env[61855]: WARNING nova.compute.manager [req-d9e041b1-4c03-423b-836a-126418d1094a req-01f6b562-5ea0-4ea9-a50f-55158ad1589e service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Received unexpected event network-vif-plugged-01cef77b-658c-4fe7-bb12-76b6c1029da7 for instance with vm_state building and task_state spawning. 
[ 1065.697335] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Successfully updated port: 01cef77b-658c-4fe7-bb12-76b6c1029da7 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1065.706778] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquiring lock "refresh_cache-adfd94b5-7e03-49d1-a445-c58b296e5185" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.706994] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquired lock "refresh_cache-adfd94b5-7e03-49d1-a445-c58b296e5185" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.707197] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1065.749130] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1065.923983] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.932494] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Updating instance_info_cache with network_info: [{"id": "01cef77b-658c-4fe7-bb12-76b6c1029da7", "address": "fa:16:3e:65:59:4f", "network": {"id": "cb9dd2f2-ba41-4e63-a274-77bb2275f08f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-305164877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9db53ed1fd436eb679d7b7b3ac1709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01cef77b-65", "ovs_interfaceid": "01cef77b-658c-4fe7-bb12-76b6c1029da7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.945775] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Releasing lock "refresh_cache-adfd94b5-7e03-49d1-a445-c58b296e5185" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.945775] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Instance network_info: |[{"id": "01cef77b-658c-4fe7-bb12-76b6c1029da7", "address": "fa:16:3e:65:59:4f", "network": {"id": "cb9dd2f2-ba41-4e63-a274-77bb2275f08f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-305164877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9db53ed1fd436eb679d7b7b3ac1709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap01cef77b-65", "ovs_interfaceid": "01cef77b-658c-4fe7-bb12-76b6c1029da7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1065.945966] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:59:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '418ddd3d-5f64-407e-8e0c-c8b81639bee9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01cef77b-658c-4fe7-bb12-76b6c1029da7', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1065.951929] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Creating folder: Project (2a9db53ed1fd436eb679d7b7b3ac1709). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1065.952564] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3865442-316d-43a0-9213-bd56aaad9507 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.962916] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Created folder: Project (2a9db53ed1fd436eb679d7b7b3ac1709) in parent group-v847048. [ 1065.963122] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Creating folder: Instances. Parent ref: group-v847110. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1065.963352] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9169648c-fe84-4e78-b8bf-6383f71653f3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.971716] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Created folder: Instances in parent group-v847110. [ 1065.971940] env[61855]: DEBUG oslo.service.loopingcall [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1065.972133] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1065.972338] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27bbb6e7-a916-4c26-86a6-2644573a45a6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.990739] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1065.990739] env[61855]: value = "task-4302873" [ 1065.990739] env[61855]: _type = "Task" [ 1065.990739] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.998151] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302873, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.500526] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302873, 'name': CreateVM_Task, 'duration_secs': 0.324482} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.502047] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1066.502047] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.502047] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.502047] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1066.502306] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-635459f2-b4c7-47e7-b2a1-3ad06a8eb20d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.506737] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Waiting for the task: (returnval){ [ 1066.506737] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]526555da-a72f-9c49-5b89-abd518c248ee" [ 1066.506737] env[61855]: _type = "Task" [ 1066.506737] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.514321] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]526555da-a72f-9c49-5b89-abd518c248ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.924651] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.020982] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.021270] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1067.021486] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.641410] env[61855]: DEBUG nova.compute.manager [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Received event network-changed-01cef77b-658c-4fe7-bb12-76b6c1029da7 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1067.641521] env[61855]: DEBUG nova.compute.manager [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Refreshing instance network info cache due to event network-changed-01cef77b-658c-4fe7-bb12-76b6c1029da7. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1067.641729] env[61855]: DEBUG oslo_concurrency.lockutils [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] Acquiring lock "refresh_cache-adfd94b5-7e03-49d1-a445-c58b296e5185" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.641875] env[61855]: DEBUG oslo_concurrency.lockutils [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] Acquired lock "refresh_cache-adfd94b5-7e03-49d1-a445-c58b296e5185" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.642212] env[61855]: DEBUG nova.network.neutron [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Refreshing network info cache for port 01cef77b-658c-4fe7-bb12-76b6c1029da7 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1067.990882] env[61855]: DEBUG nova.network.neutron [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Updated VIF entry in instance network info cache for port 01cef77b-658c-4fe7-bb12-76b6c1029da7. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1067.991260] env[61855]: DEBUG nova.network.neutron [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Updating instance_info_cache with network_info: [{"id": "01cef77b-658c-4fe7-bb12-76b6c1029da7", "address": "fa:16:3e:65:59:4f", "network": {"id": "cb9dd2f2-ba41-4e63-a274-77bb2275f08f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-305164877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a9db53ed1fd436eb679d7b7b3ac1709", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01cef77b-65", "ovs_interfaceid": "01cef77b-658c-4fe7-bb12-76b6c1029da7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.000298] env[61855]: DEBUG oslo_concurrency.lockutils [req-0c184418-0906-47e9-9eb4-d3e9eab45011 req-12ff2eec-f3de-4173-bf55-3b31d5be0e5c service nova] Releasing lock "refresh_cache-adfd94b5-7e03-49d1-a445-c58b296e5185" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.167291] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] 
Acquiring lock "adfd94b5-7e03-49d1-a445-c58b296e5185" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.952505] env[61855]: WARNING oslo_vmware.rw_handles [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1106.952505] env[61855]: ERROR oslo_vmware.rw_handles [ 1106.953180] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1106.955347] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1106.955710] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Copying Virtual Disk [datastore2] vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/f335d3c3-cff0-4c62-969f-429826bf8cf2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1106.956013] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d95ee53a-f5f7-49cf-9c00-18d471e234f8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.965148] env[61855]: DEBUG 
oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Waiting for the task: (returnval){ [ 1106.965148] env[61855]: value = "task-4302874" [ 1106.965148] env[61855]: _type = "Task" [ 1106.965148] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.973033] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Task: {'id': task-4302874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.476025] env[61855]: DEBUG oslo_vmware.exceptions [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1107.476330] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.476870] env[61855]: ERROR nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1107.476870] env[61855]: Faults: ['InvalidArgument'] [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Traceback (most recent call last): [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] yield resources [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self.driver.spawn(context, instance, image_meta, [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self._fetch_image_if_missing(context, vi) [ 1107.476870] env[61855]: ERROR nova.compute.manager [instance: 
9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] image_cache(vi, tmp_image_ds_loc) [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] vm_util.copy_virtual_disk( [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] session._wait_for_task(vmdk_copy_task) [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] return self.wait_for_task(task_ref) [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] return evt.wait() [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] result = hub.switch() [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1107.477299] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] return self.greenlet.switch() [ 1107.477723] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1107.477723] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self.f(*self.args, **self.kw) [ 1107.477723] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1107.477723] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] raise exceptions.translate_fault(task_info.error) [ 1107.477723] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1107.477723] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Faults: ['InvalidArgument'] [ 1107.477723] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] [ 1107.477723] env[61855]: INFO nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 
tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Terminating instance [ 1107.478770] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.478981] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1107.479229] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44f16a06-1622-4b02-a965-a2516b3a33ce {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.482213] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1107.482415] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1107.483165] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69e19a7-8509-432e-bee5-8f9e7be762f9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.489843] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1107.490244] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09641935-ecd4-4351-9b43-6d8a67c9932b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.492298] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1107.492473] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1107.493417] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-418aac60-f142-47cc-903e-cd1408cb0f1c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.497833] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Waiting for the task: (returnval){ [ 1107.497833] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52f7f3e8-d398-b9cf-a8e3-09030f15bd32" [ 1107.497833] env[61855]: _type = "Task" [ 1107.497833] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.506335] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52f7f3e8-d398-b9cf-a8e3-09030f15bd32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.554953] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1107.555183] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1107.555363] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Deleting the datastore file [datastore2] 9b0b21f7-bdc0-488c-a7fc-234727c26b68 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1107.555817] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c917ef1-44f0-41fd-ae0a-4907c5d37ab5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.562120] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Waiting for the task: (returnval){ [ 1107.562120] env[61855]: value = "task-4302876" [ 1107.562120] env[61855]: _type = "Task" [ 1107.562120] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.569637] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Task: {'id': task-4302876, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.009046] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1108.009046] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Creating directory with path [datastore2] vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.009367] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-917ae42f-f54f-46bf-8a38-19b0f142eac2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.021024] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Created directory with path [datastore2] vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.021024] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Fetch image to [datastore2] vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1108.021024] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1108.021367] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4112679e-6aef-4291-8eee-c3a8d16d3f31 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.027814] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c86812d-8505-4c34-89b6-64947a1f9e1b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.036735] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5eea7e3-5b9a-484c-80a7-6264abc9590b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.070672] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c0da8d-5319-4496-b013-df5141067446 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.077458] env[61855]: DEBUG oslo_vmware.api [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Task: {'id': task-4302876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075331} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.078852] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1108.079070] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1108.079267] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1108.079455] env[61855]: INFO nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1108.081506] env[61855]: DEBUG nova.compute.claims [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1108.081680] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.081894] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.085402] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c1172b63-8436-40b9-bdb1-8e46f3a3ef4b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.104635] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1108.160237] env[61855]: DEBUG oslo_vmware.rw_handles [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1108.221522] env[61855]: DEBUG oslo_vmware.rw_handles [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1108.221522] env[61855]: DEBUG oslo_vmware.rw_handles [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1108.466704] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67f6c14-6387-4fe5-9e04-e7c7a4e4f925 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.474794] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f9f2c5-db16-44bb-8c03-b31ae2821b81 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.505035] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef15e8ae-d678-4b10-a680-765f267a58e2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.511202] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649352b7-3b30-4de6-94c1-c94368a9094f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.525571] env[61855]: DEBUG nova.compute.provider_tree [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.534990] env[61855]: DEBUG nova.scheduler.client.report [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1108.571860] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.490s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.572260] env[61855]: ERROR nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1108.572260] env[61855]: Faults: ['InvalidArgument'] [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Traceback (most recent call last): [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1108.572260] 
env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self.driver.spawn(context, instance, image_meta, [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self._fetch_image_if_missing(context, vi) [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] image_cache(vi, tmp_image_ds_loc) [ 1108.572260] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] vm_util.copy_virtual_disk( [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] session._wait_for_task(vmdk_copy_task) [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] return self.wait_for_task(task_ref) [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] return evt.wait() [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] result = hub.switch() [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] return self.greenlet.switch() [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1108.572642] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] self.f(*self.args, **self.kw) [ 1108.572995] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1108.572995] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] raise exceptions.translate_fault(task_info.error) [ 1108.572995] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1108.572995] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Faults: ['InvalidArgument'] [ 1108.572995] env[61855]: ERROR nova.compute.manager [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] [ 1108.573205] env[61855]: DEBUG nova.compute.utils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1108.574692] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Build of instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 was re-scheduled: A specified parameter was not correct: fileType [ 1108.574692] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1108.575179] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1108.575370] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1108.575551] env[61855]: DEBUG nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1108.575766] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1108.943762] env[61855]: DEBUG nova.network.neutron [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.957975] env[61855]: INFO nova.compute.manager [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Took 0.38 seconds to deallocate network for instance. [ 1109.077372] env[61855]: INFO nova.scheduler.client.report [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Deleted allocations for instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 [ 1109.101336] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f7ae44fc-fc92-43c2-a4a6-94313565f859 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 519.568s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.102585] env[61855]: DEBUG oslo_concurrency.lockutils [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 320.905s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.105865] env[61855]: DEBUG oslo_concurrency.lockutils [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Acquiring lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.105865] env[61855]: DEBUG oslo_concurrency.lockutils [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.105865] env[61855]: DEBUG oslo_concurrency.lockutils [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.105865] env[61855]: INFO nova.compute.manager [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Terminating instance [ 1109.108781] env[61855]: DEBUG nova.compute.manager [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1109.108781] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1109.108964] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d84c988-b6d2-4d96-b3b7-dc22d537f025 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.114214] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1109.120999] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec8d515-27bf-4fc5-be0f-db9ec46e5f7d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.151233] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b0b21f7-bdc0-488c-a7fc-234727c26b68 could not be found. 
[ 1109.151462] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1109.151635] env[61855]: INFO nova.compute.manager [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1109.151891] env[61855]: DEBUG oslo.service.loopingcall [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1109.152140] env[61855]: DEBUG nova.compute.manager [-] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1109.152237] env[61855]: DEBUG nova.network.neutron [-] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1109.182088] env[61855]: DEBUG nova.network.neutron [-] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1109.185181] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1109.185181] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1109.186782] env[61855]: INFO nova.compute.claims [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1109.195700] env[61855]: INFO nova.compute.manager [-] [instance: 9b0b21f7-bdc0-488c-a7fc-234727c26b68] Took 0.04 seconds to deallocate network for instance.
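The oslo.service.loopingcall line above ("Waiting for function ... _deallocate_network_with_retries to return") wraps network deallocation in a looping call that keeps invoking a callback until it signals completion. A minimal sketch of that primitive, with an illustrative callback rather than Nova's real one:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        # Illustrative stand-in: pretend the first two attempts fail and
        # keep looping; the third signals completion with a return value.
        attempts['n'] += 1
        if attempts['n'] < 3:
            return
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=1).wait()  # blocks until LoopingCallDone
    print(result)  # True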
[ 1109.300726] env[61855]: DEBUG oslo_concurrency.lockutils [None req-06b9d86d-4e82-4dfd-bba1-b48cc141ca19 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "9b0b21f7-bdc0-488c-a7fc-234727c26b68" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.198s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1109.546927] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acc550c-3928-457c-a3d0-1edb55ce2f22 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1109.554063] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088f2080-8133-477f-a10d-7fa8fac88449 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1109.583590] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb28c7b-df1f-4ae9-93d6-3918084a61f4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1109.590072] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab82bac-98e9-46c0-bef5-52c6ea33c2d6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1109.602502] env[61855]: DEBUG nova.compute.provider_tree [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1109.610681] env[61855]: DEBUG nova.scheduler.client.report [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1109.626318] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.441s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1109.626760] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1109.657323] env[61855]: DEBUG nova.compute.utils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1109.658670] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1109.658844] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1109.667153] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1109.731426] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
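The inventory payload above fixes what placement will schedule against this node: capacity per resource class is (total - reserved) * allocation_ratio, so 48 vCPUs at a 4.0 ratio yield 192 schedulable VCPU. A quick check with the logged numbers (standard placement arithmetic, shown here only as a worked example):

    # Worked example from the inventory reported above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 210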
[ 1109.754554] env[61855]: DEBUG nova.policy [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '039d86ded7fb43b2bdec3e78a123e9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '850cd134d5904c9a82c4ca1f1a9e08d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1109.757807] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1109.758040] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1109.758207] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1109.758402] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1109.758590] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1109.758747] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1109.758956] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1109.759135] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1109.759310] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1109.759470] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1109.759644] env[61855]: DEBUG nova.virt.hardware [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1109.760767] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737fb234-d0d3-4abc-8640-24ec3814c72a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1109.768511] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76e3a76-42f0-4628-b94c-931c61899db5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1110.121584] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Successfully created port: efa480d4-db49-4bb1-b6f1-7d77aea95791 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1110.862741] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Successfully updated port: efa480d4-db49-4bb1-b6f1-7d77aea95791 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1110.875191] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "refresh_cache-cc01e7e2-26c0-4936-9dec-edd5578fe1e1" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
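The nova.virt.hardware trace above walks from unset flavor/image limits (logged as 0:0:0, defaulting to a 65536 ceiling per dimension) to the single viable topology for one vCPU, 1:1:1. A simplified sketch of that enumeration (not Nova's actual implementation) that factors the vCPU count into (sockets, cores, threads) triples under the limits:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) factorizations of the vCPU
        # count that respect the per-dimension ceilings from the log.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)], matching "Got 1 possible topologies"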
[ 1110.875350] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquired lock "refresh_cache-cc01e7e2-26c0-4936-9dec-edd5578fe1e1" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1110.875499] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1110.923058] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1111.152753] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Updating instance_info_cache with network_info: [{"id": "efa480d4-db49-4bb1-b6f1-7d77aea95791", "address": "fa:16:3e:d6:7a:a8", "network": {"id": "3e0e48ec-4d37-4ca8-80c1-7b3bef0e24c8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1667872361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "850cd134d5904c9a82c4ca1f1a9e08d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefa480d4-db", "ovs_interfaceid": "efa480d4-db49-4bb1-b6f1-7d77aea95791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1111.164570] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Releasing lock "refresh_cache-cc01e7e2-26c0-4936-9dec-edd5578fe1e1" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1111.164905] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Instance network_info: |[{"id": "efa480d4-db49-4bb1-b6f1-7d77aea95791", "address": "fa:16:3e:d6:7a:a8", "network": {"id": "3e0e48ec-4d37-4ca8-80c1-7b3bef0e24c8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1667872361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "850cd134d5904c9a82c4ca1f1a9e08d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefa480d4-db", "ovs_interfaceid": "efa480d4-db49-4bb1-b6f1-7d77aea95791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1111.165322] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:7a:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '140f4558-c11e-4af4-ab36-234e2d2f80a4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efa480d4-db49-4bb1-b6f1-7d77aea95791', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1111.175085] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Creating folder: Project (850cd134d5904c9a82c4ca1f1a9e08d2). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1111.178270] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-503d06e0-26d7-499f-802b-c3f5d82b1842 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1111.180961] env[61855]: DEBUG nova.compute.manager [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Received event network-vif-plugged-efa480d4-db49-4bb1-b6f1-7d77aea95791 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1111.181673] env[61855]: DEBUG oslo_concurrency.lockutils [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] Acquiring lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1111.181905] env[61855]: DEBUG oslo_concurrency.lockutils [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1111.182090] env[61855]: DEBUG oslo_concurrency.lockutils [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1111.182265] env[61855]: DEBUG nova.compute.manager [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] No waiting events found dispatching network-vif-plugged-efa480d4-db49-4bb1-b6f1-7d77aea95791 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1111.182433] env[61855]: WARNING nova.compute.manager [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Received unexpected event network-vif-plugged-efa480d4-db49-4bb1-b6f1-7d77aea95791 for instance with vm_state building and task_state spawning.
[ 1111.182632] env[61855]: DEBUG nova.compute.manager [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Received event network-changed-efa480d4-db49-4bb1-b6f1-7d77aea95791 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1111.182874] env[61855]: DEBUG nova.compute.manager [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Refreshing instance network info cache due to event network-changed-efa480d4-db49-4bb1-b6f1-7d77aea95791. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 1111.183097] env[61855]: DEBUG oslo_concurrency.lockutils [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] Acquiring lock "refresh_cache-cc01e7e2-26c0-4936-9dec-edd5578fe1e1" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1111.183468] env[61855]: DEBUG oslo_concurrency.lockutils [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] Acquired lock "refresh_cache-cc01e7e2-26c0-4936-9dec-edd5578fe1e1" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1111.183655] env[61855]: DEBUG nova.network.neutron [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Refreshing network info cache for port efa480d4-db49-4bb1-b6f1-7d77aea95791 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1111.197032] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Created folder: Project (850cd134d5904c9a82c4ca1f1a9e08d2) in parent group-v847048.
[ 1111.197723] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Creating folder: Instances. Parent ref: group-v847113. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1111.198454] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-961772c3-4b44-4a7d-8857-db64a0dceb3d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1111.208231] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Created folder: Instances in parent group-v847113.
[ 1111.208464] env[61855]: DEBUG oslo.service.loopingcall [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1111.208642] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1111.208845] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05b40f0f-31dd-40bb-8c56-c90bbc0f86e6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1111.228386] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1111.228386] env[61855]: value = "task-4302879"
[ 1111.228386] env[61855]: _type = "Task"
[ 1111.228386] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
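The '(returnval){ value = "task-4302879" ... } to complete' block above is oslo.vmware polling a vCenter task handle until it finishes. A hedged sketch of the calling pattern: wait_for_task and invoke_api are oslo.vmware's real entry points, while the session argument and the vm_folder/create_spec/res_pool managed-object references are placeholders for what Nova assembles elsewhere.

    from oslo_vmware import exceptions as vexc

    def create_vm(session, vm_folder, create_spec, res_pool):
        # 'session' is an oslo_vmware.api.VMwareAPISession; the other
        # arguments stand in for the refs/specs built by the driver.
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=create_spec, pool=res_pool)
        try:
            # wait_for_task polls at a fixed interval, logging "progress is
            # N%" until the task reaches 'success', or raising on 'error'.
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            print(e.fault_list)  # e.g. ['InvalidArgument'], as earlier in this log
            raise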
[ 1111.236208] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302879, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1111.740362] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302879, 'name': CreateVM_Task, 'duration_secs': 0.285814} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1111.740542] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1111.741238] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1111.741405] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1111.741735] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1111.741989] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f120ca39-eba0-4654-a3a9-6ee4ade32bdd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1111.746616] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Waiting for the task: (returnval){
[ 1111.746616] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b36255-cb0d-3efa-a9cd-fbc857aae094"
[ 1111.746616] env[61855]: _type = "Task"
[ 1111.746616] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1111.747410] env[61855]: DEBUG nova.network.neutron [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Updated VIF entry in instance network info cache for port efa480d4-db49-4bb1-b6f1-7d77aea95791. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1111.747740] env[61855]: DEBUG nova.network.neutron [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Updating instance_info_cache with network_info: [{"id": "efa480d4-db49-4bb1-b6f1-7d77aea95791", "address": "fa:16:3e:d6:7a:a8", "network": {"id": "3e0e48ec-4d37-4ca8-80c1-7b3bef0e24c8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1667872361-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "850cd134d5904c9a82c4ca1f1a9e08d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefa480d4-db", "ovs_interfaceid": "efa480d4-db49-4bb1-b6f1-7d77aea95791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1111.757137] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b36255-cb0d-3efa-a9cd-fbc857aae094, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1111.758121] env[61855]: DEBUG oslo_concurrency.lockutils [req-f2b6908a-b31d-42dd-8eb3-ee57e510d671 req-f2940ac8-2aed-4504-b48d-b46b5497ef39 service nova] Releasing lock "refresh_cache-cc01e7e2-26c0-4936-9dec-edd5578fe1e1" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1112.259887] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1112.259887] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1112.259887] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1116.926512] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1116.926889] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61855) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}}
[ 1117.936727] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1117.937147] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}}
[ 1117.949942] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] There are 0 instances to clean {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}}
[ 1121.932481] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1122.154842] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
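The "Running periodic task ComputeManager._..." lines here and below come from oslo.service's periodic-task machinery: methods decorated with a spacing are invoked from the service's timer loop. A minimal sketch with a made-up manager class and task name:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        # Illustrative task; 'spacing' is the minimum interval in seconds
        # between runs, which drives the "Running periodic task ..." lines.
        @periodic_task.periodic_task(spacing=10)
        def _example_task(self, context):
            pass

    mgr = Manager(cfg.CONF)
    mgr.run_periodic_tasks(context=None)  # the service timer calls this repeatedly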
Acquiring lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.923907] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.924109] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1122.924236] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1122.945537] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.945813] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.945850] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946020] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946182] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946317] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946442] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946562] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946680] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946798] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1122.946921] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1122.947464] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.947845] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.947845] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.958896] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.959136] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.959303] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.959467] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1122.960716] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56f40dc-cab3-4f89-8995-66da01525ed5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.969247] env[61855]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfdd26a-6c48-47c2-979f-a285edcc1d24 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.983391] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8b5d61-cf7d-4db1-8000-7a86054fa137 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.989607] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bfe51a-0cc6-456c-9ec4-4efc454a62d2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.018295] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180628MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1123.018458] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.018648] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.173095] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 3ae180bd-526d-481f-958b-ca3af96b4406 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.173095] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.173095] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.173095] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
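The heal pass above rebuilds its candidate list and skips every instance still in the Building state, which is why it ends with nothing to update. A sketch of that filter; the Instance tuple stands in for Nova's instance objects:

    from collections import namedtuple

    Instance = namedtuple('Instance', ['uuid', 'vm_state'])

    # Two of the instances from the log; both still building.
    instances = [
        Instance('3ae180bd-526d-481f-958b-ca3af96b4406', 'building'),
        Instance('cc01e7e2-26c0-4936-9dec-edd5578fe1e1', 'building'),
    ]

    to_heal = [i for i in instances if i.vm_state != 'building']
    if not to_heal:
        print("Didn't find any instances for network info cache update.")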
[ 1123.173442] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1123.173442] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1123.173442] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1123.173442] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1123.173589] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1123.173589] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1123.187353] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.211216] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 02f2ee69-9ecf-4176-943e-06cdf255c92d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.223441] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 54a8ce25-2900-44a4-9985-b70514fcc9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.234056] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance decbac6e-4d06-42ea-bc7b-9050ae0dba6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.244315] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2dc6df74-b8c5-472f-9c02-d44a549e8aea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.256085] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f6e20ee2-94f3-4e24-a14d-1ba5eab45823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.266459] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b38fa534-0d62-40ce-ae69-9275ffe839e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.276802] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.286629] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c9608176-75bf-418f-b91d-79c6d997f543 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.296829] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 78755b45-1bb8-4a3b-9c51-7408425a561f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.306695] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ff5c9d8-b95d-4127-95eb-fece90efe346 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.316387] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance fe3646dd-a2ac-4ccb-9761-7c9b95be690e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.326153] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 44f7b0f2-7585-4def-98d2-e34039db404c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1123.335320] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1123.335549] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1123.335712] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1123.617041] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a294d0bc-cdbe-42d4-9a49-ae88918a68c2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.625016] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284d12ce-bcf3-4dc5-abf2-eb98f6239e46 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.654288] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e50bf65-be3e-4dca-a6b8-e3822245b7cc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.661212] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec92d50-f2b6-4183-83d6-acb0fe7ee85e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.674663] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.683734] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1123.698385] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1123.698826] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.680s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.699071] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.682602] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.682983] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.682983] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1124.919818] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.923745] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.923623] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.238638] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.261345] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Getting list of instances from cluster (obj){ [ 1133.261345] env[61855]: value = "domain-c8" [ 1133.261345] env[61855]: _type = "ClusterComputeResource" [ 1133.261345] env[61855]: } {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1133.262965] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7a4782-f362-41ac-a896-f6d8f9a7fabb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.280218] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Got total of 10 instances {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1133.280403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 3ae180bd-526d-481f-958b-ca3af96b4406 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.280597] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 
0519cba4-d9b1-4f54-b889-2c09d2d26b14 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.280760] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.280919] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 0d6f9828-e93a-474d-af31-f0ee6cb2149f {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.281096] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 29ae12b6-adc9-4a25-8a89-9a88470b3818 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.281251] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.281401] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.281553] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 1f2b9ec1-5449-45a9-9691-857b15aaa9ff {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.281703] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid adfd94b5-7e03-49d1-a445-c58b296e5185 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.281854] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid cc01e7e2-26c0-4936-9dec-edd5578fe1e1 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1133.282210] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "3ae180bd-526d-481f-958b-ca3af96b4406" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.282451] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.283086] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.283086] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.283240] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.283324] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.283520] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.283713] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.283908] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "adfd94b5-7e03-49d1-a445-c58b296e5185" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.284116] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.969128] env[61855]: WARNING oslo_vmware.rw_handles [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles version, status, 
reason = self._read_status() [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1156.969128] env[61855]: ERROR oslo_vmware.rw_handles [ 1156.969128] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1156.969937] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1156.969937] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Copying Virtual Disk [datastore2] vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/3ff1e0fb-a6e5-41e5-afb6-2c5a191c6d21/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1156.970101] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13c8d426-bb44-4fd8-b627-f3d53b2de214 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.978171] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Waiting for the task: (returnval){ [ 1156.978171] env[61855]: value = "task-4302880" [ 1156.978171] env[61855]: _type = "Task" [ 1156.978171] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.986513] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Task: {'id': task-4302880, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.489553] env[61855]: DEBUG oslo_vmware.exceptions [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1157.489847] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.490412] env[61855]: ERROR nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1157.490412] env[61855]: Faults: ['InvalidArgument'] [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Traceback (most recent call last): [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] yield resources [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self.driver.spawn(context, instance, image_meta, [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self._fetch_image_if_missing(context, vi) [ 1157.490412] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] image_cache(vi, tmp_image_ds_loc) [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] vm_util.copy_virtual_disk( [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] session._wait_for_task(vmdk_copy_task) [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] return self.wait_for_task(task_ref) [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] return evt.wait() [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] result = hub.switch() [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1157.490838] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] return self.greenlet.switch() [ 1157.491316] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1157.491316] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self.f(*self.args, **self.kw) [ 1157.491316] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1157.491316] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] raise exceptions.translate_fault(task_info.error) [ 1157.491316] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1157.491316] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Faults: ['InvalidArgument'] [ 1157.491316] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] [ 1157.491316] env[61855]: INFO nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Terminating instance [ 1157.493058] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.493058] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.493058] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-b830548a-df30-4d02-b941-3aa5c8823d3e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.494978] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1157.495438] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1157.496173] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c86a0e-2ccb-4029-b277-4fe5d5f37edf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.503977] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1157.504208] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ece89a8-07e1-47ec-a4d8-ba09e82cb001 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.506399] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1157.506573] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1157.507508] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-188a788c-82dd-4656-86cd-408287edc228 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.512141] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Waiting for the task: (returnval){ [ 1157.512141] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]520a8d02-9e83-de60-73f6-6c93737d789f" [ 1157.512141] env[61855]: _type = "Task" [ 1157.512141] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.520278] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]520a8d02-9e83-de60-73f6-6c93737d789f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.582133] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1157.582410] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1157.582624] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Deleting the datastore file [datastore2] 3ae180bd-526d-481f-958b-ca3af96b4406 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1157.582900] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c27dad2c-bea1-47f7-a567-7859c4e837a1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.590084] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Waiting for the task: (returnval){ [ 1157.590084] env[61855]: value = "task-4302882" [ 1157.590084] env[61855]: _type = "Task" [ 1157.590084] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.597306] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Task: {'id': task-4302882, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.022594] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1158.022941] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Creating directory with path [datastore2] vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1158.023248] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae693207-c036-400e-a23e-006e84878c26 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.035365] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Created directory with path [datastore2] vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1158.035580] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Fetch image to [datastore2] vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1158.035727] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1158.036604] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314fe143-ddf1-4780-9125-ba5410b6f9de {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.043055] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488919c8-b83e-4584-b4ad-520608598099 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.052267] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee25e44-366a-4682-887c-6e2a770b770f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.083844] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-78b6f4f2-c41c-487f-b99b-e3597eddc96b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.089555] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d2944a14-2dc0-4584-8779-54a9bc9562d7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.098767] env[61855]: DEBUG oslo_vmware.api [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Task: {'id': task-4302882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072484} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.098967] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.099198] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1158.099395] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1158.099570] env[61855]: INFO nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Took 0.60 seconds to destroy the instance on the hypervisor. 
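The failed CopyVirtualDisk_Task above follows the oslo.vmware pattern visible in the traceback: the driver invokes a vCenter task, polls it ("progress is 0%"), and a task that ends in error has its fault translated into a VimFaultException, which is what surfaces as "A specified parameter was not correct: fileType". A minimal sketch of that poll-and-translate loop follows; all names in it (get_task_info, the local VimFaultException stand-in) are illustrative assumptions, not the oslo.vmware source.

```python
# Illustrative sketch only (not the oslo.vmware implementation) of the
# task polling seen in the log: poll until 'success' or 'error', and turn
# an error fault such as InvalidArgument into a raised exception.
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info: hypothetical callable returning an object with
    # .state in {'running', 'success', 'error'}, .error (fault text),
    # and .fault_list (e.g. ['InvalidArgument']).
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # Corresponds to _poll_task raising
            # translate_fault(task_info.error) in the traceback above.
            raise VimFaultException(info.fault_list, info.error)
        time.sleep(poll_interval)  # the "progress is N%" polling cadence
```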
[ 1158.101693] env[61855]: DEBUG nova.compute.claims [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1158.101863] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.102093] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.120460] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1158.179729] env[61855]: DEBUG oslo_vmware.rw_handles [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1158.242195] env[61855]: DEBUG oslo_vmware.rw_handles [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1158.242405] env[61855]: DEBUG oslo_vmware.rw_handles [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1158.504084] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a22d77-fcc2-4615-bce1-d6c73e6a1421 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.512807] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477306b5-91cb-49e1-ae9f-a920ed2789d4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.542622] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33bd134-2242-4ded-9670-0392575f0986 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.550043] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a134f3a1-d584-479d-bc0c-c154d40f020d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.562581] env[61855]: DEBUG nova.compute.provider_tree [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.572024] env[61855]: DEBUG nova.scheduler.client.report [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1158.585588] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.483s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.586111] env[61855]: ERROR nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1158.586111] env[61855]: Faults: ['InvalidArgument'] [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Traceback (most recent call last): [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self.driver.spawn(context, instance, image_meta, [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self._fetch_image_if_missing(context, vi) [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] image_cache(vi, tmp_image_ds_loc) [ 1158.586111] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] vm_util.copy_virtual_disk( [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] session._wait_for_task(vmdk_copy_task) [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] return self.wait_for_task(task_ref) [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] return evt.wait() [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] result = hub.switch() [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] return self.greenlet.switch() [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1158.586474] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] self.f(*self.args, **self.kw) [ 1158.586807] env[61855]: ERROR nova.compute.manager [instance: 
3ae180bd-526d-481f-958b-ca3af96b4406] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1158.586807] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] raise exceptions.translate_fault(task_info.error) [ 1158.586807] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1158.586807] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Faults: ['InvalidArgument'] [ 1158.586807] env[61855]: ERROR nova.compute.manager [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] [ 1158.586807] env[61855]: DEBUG nova.compute.utils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1158.588168] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Build of instance 3ae180bd-526d-481f-958b-ca3af96b4406 was re-scheduled: A specified parameter was not correct: fileType [ 1158.588168] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1158.588544] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1158.588714] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1158.588872] env[61855]: DEBUG nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1158.589182] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1158.859587] env[61855]: DEBUG nova.network.neutron [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.876279] env[61855]: INFO nova.compute.manager [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Took 0.29 seconds to deallocate network for instance. [ 1158.977282] env[61855]: INFO nova.scheduler.client.report [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Deleted allocations for instance 3ae180bd-526d-481f-958b-ca3af96b4406 [ 1158.999057] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9b5ae933-ab1f-45f8-a1f1-481c8f588e08 tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "3ae180bd-526d-481f-958b-ca3af96b4406" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 564.166s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.000202] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "3ae180bd-526d-481f-958b-ca3af96b4406" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 366.174s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.000433] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Acquiring lock "3ae180bd-526d-481f-958b-ca3af96b4406-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.000642] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock 
"3ae180bd-526d-481f-958b-ca3af96b4406-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.000810] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "3ae180bd-526d-481f-958b-ca3af96b4406-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.003935] env[61855]: INFO nova.compute.manager [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Terminating instance [ 1159.005759] env[61855]: DEBUG nova.compute.manager [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1159.005965] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1159.006551] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1f74dc7-8585-4165-be87-586ad9718c44 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.015651] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21b2f4d-8681-49ee-8f45-f1e6a0d83b36 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.027396] env[61855]: DEBUG nova.compute.manager [None req-f2e6ce4e-6ff0-42fe-8453-b8ac7cdb29c6 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: 9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1159.048265] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ae180bd-526d-481f-958b-ca3af96b4406 could not be found. 
[ 1159.048504] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1159.048831] env[61855]: INFO nova.compute.manager [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1159.049184] env[61855]: DEBUG oslo.service.loopingcall [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1159.050186] env[61855]: DEBUG nova.compute.manager [-] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1159.050186] env[61855]: DEBUG nova.network.neutron [-] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1159.056881] env[61855]: DEBUG nova.compute.manager [None req-f2e6ce4e-6ff0-42fe-8453-b8ac7cdb29c6 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: 9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1159.081293] env[61855]: DEBUG nova.network.neutron [-] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.083456] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f2e6ce4e-6ff0-42fe-8453-b8ac7cdb29c6 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "9ed9c3f5-bd2c-4bfb-a6db-6fbaaf3dc8d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.067s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.089812] env[61855]: INFO nova.compute.manager [-] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] Took 0.04 seconds to deallocate network for instance. [ 1159.094505] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Starting instance... 
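The loopingcall record above ("Waiting for function ..._deallocate_network_with_retries to return") reflects the network deallocation being wrapped in oslo.service's RetryDecorator, so a transient connection failure does not leak ports. A hedged sketch of that shape; the retry parameters and the exception type are illustrative, not taken from the log:

```python
from oslo_service import loopingcall

class ConnectFailure(Exception):
    """Stand-in for a transient Neutron/Keystone connection error."""

def _deallocate_network():
    print("ports unbound and deallocated")  # placeholder for the real call

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=12, exceptions=(ConnectFailure,))
def _deallocate_network_with_retries():
    # Retried with increasing sleeps while ConnectFailure is raised;
    # any other exception propagates immediately and fails the teardown.
    _deallocate_network()

if __name__ == "__main__":
    _deallocate_network_with_retries()
```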
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1159.145205] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.145205] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.145770] env[61855]: INFO nova.compute.claims [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1159.182723] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e715159c-dcd0-430e-a05f-0d932d2121fc tempest-ServerDiagnosticsNegativeTest-582607195 tempest-ServerDiagnosticsNegativeTest-582607195-project-member] Lock "3ae180bd-526d-481f-958b-ca3af96b4406" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.184849] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "3ae180bd-526d-481f-958b-ca3af96b4406" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 25.901s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.184849] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 3ae180bd-526d-481f-958b-ca3af96b4406] During sync_power_state the instance has a pending task (deleting). Skip. 
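The claim sequence above (acquire "compute_resources", claim successfully on the node, release 0.458s later) is the resource tracker funnelling every consumer of host capacity through one named lock. A toy sketch, assuming a simple vCPU/RAM ledger; only the locking shape mirrors the log:

```python
from oslo_concurrency import lockutils

class ResourceTrackerSketch:
    """Toy tracker; claims, audits and cleanups all take the same lock."""

    def __init__(self, free_vcpus, free_ram_mb):
        self.free_vcpus = free_vcpus
        self.free_ram_mb = free_ram_mb

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, vcpus, ram_mb):
        # Holding 'compute_resources' while checking and decrementing
        # means concurrent builds cannot double-book the same capacity.
        if vcpus > self.free_vcpus or ram_mb > self.free_ram_mb:
            raise RuntimeError('insufficient resources')
        self.free_vcpus -= vcpus
        self.free_ram_mb -= ram_mb
```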
[ 1159.184849] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "3ae180bd-526d-481f-958b-ca3af96b4406" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.520542] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b93224-d69a-42cd-948f-0775111362bd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.528141] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd3afb4-768c-4679-ae4e-decf5acac18f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.559558] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1a1ad9-c7cd-475c-9566-b412713adbcd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.566704] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0740dc-1e73-405e-9cd2-481d35ac2a2f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.580304] env[61855]: DEBUG nova.compute.provider_tree [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.588868] env[61855]: DEBUG nova.scheduler.client.report [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1159.602182] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.458s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.605834] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Start building networks asynchronously for instance. 
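Both "Inventory has not changed" lines above come from comparing the freshly computed inventory against the cached provider view and skipping the placement write when they match. A minimal sketch of that check, using the inventory data exactly as logged; put_to_placement is a hypothetical stand-in for the report client call:

```python
# Freshly computed inventory for this compute node (values from the log).
new_inv = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def sync_inventory(cached_inv, new_inv, put_to_placement):
    # Placement writes are skipped when nothing changed, which is what
    # the "Inventory has not changed" DEBUG lines record.
    if cached_inv == new_inv:
        return False
    put_to_placement(new_inv)
    return True
```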
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1159.639819] env[61855]: DEBUG nova.compute.utils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1159.641073] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1159.641253] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1159.651690] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1159.712287] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Start spawning the instance on the hypervisor. 
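Per the records above, IP allocation runs in the background ("Allocating IP information in the background") while block device mappings are built, and the spawn step later joins on the port result. Nova does this with eventlet greenthreads; the sketch below illustrates the same overlap with concurrent.futures instead, and all three callables are hypothetical stand-ins:

```python
from concurrent.futures import ThreadPoolExecutor

def build_instance(allocate_for_instance, build_block_device_mappings, spawn):
    with ThreadPoolExecutor(max_workers=1) as pool:
        # Port creation happens in the background ...
        network_info = pool.submit(allocate_for_instance)
        # ... while block device mappings are built on the main path.
        bdms = build_block_device_mappings()
        # spawn() blocks on the network result only when it needs it.
        spawn(network_info.result(), bdms)
```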
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1159.730025] env[61855]: DEBUG nova.policy [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a32602568f3427c81ba87e94876a373', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '777a21bef34b432092785f87f9195582', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1159.739072] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1159.739309] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1159.739469] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.739653] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1159.739802] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.739952] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1159.740172] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1159.740340] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1159.740508] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1159.740673] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1159.740845] env[61855]: DEBUG nova.virt.hardware [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1159.741692] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e941be-20fc-4117-8ac1-62de7c77838b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.750789] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b5d449-539a-42a7-806a-e56b187ba6bb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.044067] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Successfully created port: 1821fe2a-28b2-4fbb-a773-e8107a51fec7 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1160.909844] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Successfully updated port: 1821fe2a-28b2-4fbb-a773-e8107a51fec7 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1160.923778] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "refresh_cache-aa9a75c4-371f-407e-a79e-133606a9fabc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.923961] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquired lock "refresh_cache-aa9a75c4-371f-407e-a79e-133606a9fabc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.924152] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1160.997661] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1161.075146] env[61855]: DEBUG nova.compute.manager [req-eaff5cce-3152-4528-a878-4888c288b47e req-60f3244d-1f45-4059-9c6d-da2941a42f87 service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Received event network-vif-plugged-1821fe2a-28b2-4fbb-a773-e8107a51fec7 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1161.075434] env[61855]: DEBUG oslo_concurrency.lockutils [req-eaff5cce-3152-4528-a878-4888c288b47e req-60f3244d-1f45-4059-9c6d-da2941a42f87 service nova] Acquiring lock "aa9a75c4-371f-407e-a79e-133606a9fabc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.075603] env[61855]: DEBUG oslo_concurrency.lockutils [req-eaff5cce-3152-4528-a878-4888c288b47e req-60f3244d-1f45-4059-9c6d-da2941a42f87 service nova] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.075772] env[61855]: DEBUG oslo_concurrency.lockutils [req-eaff5cce-3152-4528-a878-4888c288b47e req-60f3244d-1f45-4059-9c6d-da2941a42f87 service nova] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.075940] env[61855]: DEBUG nova.compute.manager [req-eaff5cce-3152-4528-a878-4888c288b47e req-60f3244d-1f45-4059-9c6d-da2941a42f87 service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] No waiting events found dispatching network-vif-plugged-1821fe2a-28b2-4fbb-a773-e8107a51fec7 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1161.076141] env[61855]: WARNING 
nova.compute.manager [req-eaff5cce-3152-4528-a878-4888c288b47e req-60f3244d-1f45-4059-9c6d-da2941a42f87 service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Received unexpected event network-vif-plugged-1821fe2a-28b2-4fbb-a773-e8107a51fec7 for instance with vm_state building and task_state spawning. [ 1161.243979] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Updating instance_info_cache with network_info: [{"id": "1821fe2a-28b2-4fbb-a773-e8107a51fec7", "address": "fa:16:3e:8d:12:7a", "network": {"id": "29e44a52-f5eb-4aa1-9b65-22b8438f141f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-953195993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777a21bef34b432092785f87f9195582", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1821fe2a-28", "ovs_interfaceid": "1821fe2a-28b2-4fbb-a773-e8107a51fec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.261330] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Releasing lock "refresh_cache-aa9a75c4-371f-407e-a79e-133606a9fabc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.261478] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Instance network_info: |[{"id": "1821fe2a-28b2-4fbb-a773-e8107a51fec7", "address": "fa:16:3e:8d:12:7a", "network": {"id": "29e44a52-f5eb-4aa1-9b65-22b8438f141f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-953195993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777a21bef34b432092785f87f9195582", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", 
"segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1821fe2a-28", "ovs_interfaceid": "1821fe2a-28b2-4fbb-a773-e8107a51fec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1161.261478] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:12:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1821fe2a-28b2-4fbb-a773-e8107a51fec7', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1161.269205] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Creating folder: Project (777a21bef34b432092785f87f9195582). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1161.270130] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c301426-cbe5-4be6-9817-50b4f7f6513b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.280610] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Created folder: Project (777a21bef34b432092785f87f9195582) in parent group-v847048. [ 1161.280796] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Creating folder: Instances. Parent ref: group-v847116. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1161.281037] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-393f997d-5943-4859-9262-8f4efa210fd7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.289967] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Created folder: Instances in parent group-v847116. [ 1161.290261] env[61855]: DEBUG oslo.service.loopingcall [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1161.290388] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1161.290575] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0d40537-4b61-4a2a-91ca-d9119544fe25 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.309856] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1161.309856] env[61855]: value = "task-4302885" [ 1161.309856] env[61855]: _type = "Task" [ 1161.309856] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.020600] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302885, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.021857] env[61855]: WARNING oslo_vmware.common.loopingcall [-] task run outlasted interval by 0.21073299999999995 sec [ 1162.030082] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302885, 'name': CreateVM_Task, 'duration_secs': 0.30259} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.030269] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1162.030930] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1162.031136] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.031482] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1162.031664] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29fd659d-5a71-4f3d-8487-0e0c10497ecf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.036198] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Waiting for the 
task: (returnval){ [ 1162.036198] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52cb01fb-dd10-fa0a-1617-2a1ebb32321c" [ 1162.036198] env[61855]: _type = "Task" [ 1162.036198] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.043877] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52cb01fb-dd10-fa0a-1617-2a1ebb32321c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.264753] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.264753] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.545759] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1162.546038] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1162.546318] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1163.098854] env[61855]: DEBUG nova.compute.manager [req-182d8527-6cd2-41a9-992f-7082167afed7 req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Received event network-changed-1821fe2a-28b2-4fbb-a773-e8107a51fec7 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1163.099063] env[61855]: DEBUG nova.compute.manager [req-182d8527-6cd2-41a9-992f-7082167afed7 
req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Refreshing instance network info cache due to event network-changed-1821fe2a-28b2-4fbb-a773-e8107a51fec7. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1163.099279] env[61855]: DEBUG oslo_concurrency.lockutils [req-182d8527-6cd2-41a9-992f-7082167afed7 req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] Acquiring lock "refresh_cache-aa9a75c4-371f-407e-a79e-133606a9fabc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1163.099422] env[61855]: DEBUG oslo_concurrency.lockutils [req-182d8527-6cd2-41a9-992f-7082167afed7 req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] Acquired lock "refresh_cache-aa9a75c4-371f-407e-a79e-133606a9fabc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.099584] env[61855]: DEBUG nova.network.neutron [req-182d8527-6cd2-41a9-992f-7082167afed7 req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Refreshing network info cache for port 1821fe2a-28b2-4fbb-a773-e8107a51fec7 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1163.405969] env[61855]: DEBUG nova.network.neutron [req-182d8527-6cd2-41a9-992f-7082167afed7 req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Updated VIF entry in instance network info cache for port 1821fe2a-28b2-4fbb-a773-e8107a51fec7. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1163.406419] env[61855]: DEBUG nova.network.neutron [req-182d8527-6cd2-41a9-992f-7082167afed7 req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Updating instance_info_cache with network_info: [{"id": "1821fe2a-28b2-4fbb-a773-e8107a51fec7", "address": "fa:16:3e:8d:12:7a", "network": {"id": "29e44a52-f5eb-4aa1-9b65-22b8438f141f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-953195993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777a21bef34b432092785f87f9195582", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1821fe2a-28", "ovs_interfaceid": "1821fe2a-28b2-4fbb-a773-e8107a51fec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.415657] env[61855]: DEBUG oslo_concurrency.lockutils [req-182d8527-6cd2-41a9-992f-7082167afed7 req-1e945ed1-0277-40a3-b835-20b2af250cfc service nova] Releasing lock "refresh_cache-aa9a75c4-371f-407e-a79e-133606a9fabc" {{(pid=61855) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.063179] env[61855]: DEBUG oslo_concurrency.lockutils [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "aa9a75c4-371f-407e-a79e-133606a9fabc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.964389] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.924256] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.924357] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1183.924472] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1183.951403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.951594] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.951730] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952360] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952360] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952360] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952360] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952360] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952645] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952645] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1183.952645] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1183.953202] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.953439] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.923606] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.924115] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.924115] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
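All of these records come from oslo.service's periodic task machinery: the heal task rebuilds its instance list and skips everything still building, and _reclaim_queued_deletes bails out because the feature is disabled by configuration. A combined sketch, assuming a plain vm_state field on each instance record and conf wiring as shown; only the decorator shape mirrors Nova:

```python
from oslo_service import periodic_task

class ComputeManagerSketch(periodic_task.PeriodicTasks):
    """Illustrative manager; real Nova tasks take the same shape."""

    def __init__(self, conf, instances):
        super().__init__(conf)
        self.instances = instances

    @periodic_task.periodic_task
    def _heal_instance_info_cache(self, context):
        # Instances still building own their network info via the build
        # path, hence "Skipping network cache update ... it is Building."
        to_heal = [i for i in self.instances if i['vm_state'] != 'building']
        if not to_heal:
            return  # "Didn't find any instances for network info cache update."
        for inst in to_heal:
            self._refresh_network_cache(inst)

    @periodic_task.periodic_task
    def _reclaim_queued_deletes(self, context):
        # Still runs on schedule, but exits immediately when disabled,
        # matching "CONF.reclaim_instance_interval <= 0, skipping...".
        if self.conf.reclaim_instance_interval <= 0:
            return

    def _refresh_network_cache(self, inst):
        pass  # stand-in for the Neutron-backed cache refresh
```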
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1184.924325] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.938704] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.938775] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.938958] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.939158] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1184.944443] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20a0abf-9862-430c-a10b-a8f319ac4a4c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.958909] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614d0566-e863-49d1-bd8b-3d17b886987f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.973411] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4b9eec-b214-4749-b465-73838a4d27c2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.979663] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51489269-8e3b-443b-872d-30d43170a057 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.008965] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180597MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1185.009124] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1185.009324] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.102609] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.102777] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.102910] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.103045] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.103168] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.103290] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.103442] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.103565] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.103682] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.103797] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.115346] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance decbac6e-4d06-42ea-bc7b-9050ae0dba6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.126121] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2dc6df74-b8c5-472f-9c02-d44a549e8aea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.137880] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance f6e20ee2-94f3-4e24-a14d-1ba5eab45823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.158436] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b38fa534-0d62-40ce-ae69-9275ffe839e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.173123] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.189700] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c9608176-75bf-418f-b91d-79c6d997f543 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.209547] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 78755b45-1bb8-4a3b-9c51-7408425a561f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.228338] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ff5c9d8-b95d-4127-95eb-fece90efe346 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.241486] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance fe3646dd-a2ac-4ccb-9761-7c9b95be690e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.257997] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 44f7b0f2-7585-4def-98d2-e34039db404c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.274029] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.292566] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
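The audit above splits tracked allocations into two buckets: instances actively managed on this host keep their placement allocations, while instances that are only scheduled here and have yet to start are skipped for healing. The "Final resource view" logged just below then appears to be simple arithmetic over the ten tracked instances, each sized like the m1.nano flavor shown earlier (1 vCPU, 128 MB RAM, 1 GB disk) plus the 512 MB host RAM reservation from the inventory. A worked check:

```python
# Ten tracked instances, each with allocations
# {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}; 512 MB of host RAM reserved.
instances = 10
reserved_ram_mb = 512

used_vcpus = instances * 1                         # -> 10, used_vcpus=10
used_ram_mb = reserved_ram_mb + instances * 128    # -> 1792, used_ram=1792MB
used_disk_gb = instances * 1                       # -> 10, used_disk=10GB

assert (used_vcpus, used_ram_mb, used_disk_gb) == (10, 1792, 10)
```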
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1185.292859] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1185.293056] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1185.316816] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing inventories for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1185.332456] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating ProviderTree inventory for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1185.332650] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1185.344453] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing aggregate associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, aggregates: None {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1185.363603] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing trait associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1185.699130] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf956f2-b039-47dc-9acd-b24411a29b38 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.710086] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-62612385-cd37-49bc-ab89-15eb33123cb1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.743487] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1995abd7-72cc-4ed3-a7e1-0fbc985eb815 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.751490] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac202888-b56d-43be-8647-59d0eb654ab6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.765236] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.775851] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1185.796051] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1185.796269] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.787s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.797769] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.926525] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.822896] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "accbccfe-3858-4a4c-b47b-3f12976c8c20" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.823313] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 
tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "accbccfe-3858-4a4c-b47b-3f12976c8c20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.780934] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "cf66f181-60e6-43d4-a561-a32e9174448d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.781186] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "cf66f181-60e6-43d4-a561-a32e9174448d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.519443] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "242e1a24-3f5b-4509-8677-e5a4c7883605" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.519443] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.603631] env[61855]: DEBUG oslo_concurrency.lockutils [None req-76c349b8-60a2-4043-bda2-83ca782c628f tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] Acquiring lock "6a976d89-a637-4bcc-83f3-fd509b5bad0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.603866] env[61855]: DEBUG oslo_concurrency.lockutils [None req-76c349b8-60a2-4043-bda2-83ca782c628f tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] Lock "6a976d89-a637-4bcc-83f3-fd509b5bad0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.789556] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bac72562-6f06-4e67-90c9-d8ab97481937 tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] Acquiring lock "b5ad73e6-2c48-41c4-85f9-3b9f74afb983" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.790033] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bac72562-6f06-4e67-90c9-d8ab97481937 tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] Lock "b5ad73e6-2c48-41c4-85f9-3b9f74afb983" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.088946] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1872625f-a5f8-4362-b063-121c1eff9ce3 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.089251] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1872625f-a5f8-4362-b063-121c1eff9ce3 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.791899] env[61855]: WARNING oslo_vmware.rw_handles [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1203.791899] env[61855]: ERROR oslo_vmware.rw_handles [ 1203.792558] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1203.795839] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1203.796293] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Copying Virtual Disk [datastore2] vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/3b0e029f-52f0-4d7b-9d56-0b83a89882ad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1203.796703] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cbb3dd0-0ad8-4a88-8237-80237125fa68 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.807057] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Waiting for the task: (returnval){ [ 1203.807057] env[61855]: value = "task-4302886" [ 1203.807057] env[61855]: _type = "Task" [ 1203.807057] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.815146] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Task: {'id': task-4302886, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.318443] env[61855]: DEBUG oslo_vmware.exceptions [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1204.318780] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.319340] env[61855]: ERROR nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1204.319340] env[61855]: Faults: ['InvalidArgument'] [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Traceback (most recent call last): [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] yield resources [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self.driver.spawn(context, instance, image_meta, [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self._fetch_image_if_missing(context, vi) [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] image_cache(vi, tmp_image_ds_loc) [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] vm_util.copy_virtual_disk( [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] session._wait_for_task(vmdk_copy_task) [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] return self.wait_for_task(task_ref) [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] return evt.wait() [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] result = hub.switch() [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] return self.greenlet.switch() [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self.f(*self.args, **self.kw) [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] raise exceptions.translate_fault(task_info.error) [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Faults: ['InvalidArgument'] [ 1204.319340] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] [ 1204.320310] env[61855]: INFO nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Terminating instance [ 1204.321175] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.321382] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1204.321621] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b175dd5-8086-4fcc-ac7b-23bbb1a2dc82 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.323839] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1204.323961] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1204.324702] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695f62a5-5e0e-4309-bbd2-33a56d096149 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.331996] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1204.333083] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bce4fe65-5405-4771-805e-1a01af137874 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.334525] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1204.335170] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1204.335758] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66e69dc-f07d-4867-8511-921828095e10 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.341374] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1204.341374] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a0269f-990c-2afb-d20b-2eca541efda0" [ 1204.341374] env[61855]: _type = "Task" [ 1204.341374] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.349795] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a0269f-990c-2afb-d20b-2eca541efda0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.406318] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1204.406553] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1204.406738] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Deleting the datastore file [datastore2] 0519cba4-d9b1-4f54-b889-2c09d2d26b14 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1204.407025] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5289e8f7-0564-451d-ac7b-f3ff0531a851 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.413697] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Waiting for the task: (returnval){ [ 1204.413697] env[61855]: value = "task-4302888" [ 1204.413697] env[61855]: _type = "Task" [ 1204.413697] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.421160] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Task: {'id': task-4302888, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.852570] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1204.852570] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1204.852944] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4901dfa1-931a-46b4-a9c6-98abd9b3015b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.865698] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1204.865915] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Fetch image to [datastore2] vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1204.866115] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1204.866893] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7233f53-30a4-4384-a992-85fb8819268f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.873490] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c6c684-ba75-4ebb-9b9d-ea092772c29c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.882565] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5faafa18-e3c1-4622-85cf-f0ab94daf793 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.913197] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c8d480-d963-4711-9936-772ffbb1e8ec {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.925921] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-42a1c27f-0363-472c-b24e-ddc66c6afaa5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.927920] env[61855]: DEBUG oslo_vmware.api [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Task: {'id': task-4302888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081331} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.928183] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.928367] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1204.928540] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1204.928716] env[61855]: INFO nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Took 0.60 seconds to destroy the instance on the hypervisor. 
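The CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above all follow the same oslo.vmware pattern: wait_for_task() parks the caller while _poll_task() re-reads the task state on a timer, and an error state is turned into a raised fault, which is exactly how the "InvalidArgument: fileType" fault in the traceback surfaced (api.py _poll_task raising exceptions.translate_fault(task_info.error)). A minimal self-contained sketch of that polling loop, using a hypothetical FakeTaskInfo in place of the real vim TaskInfo object — an illustration of the pattern, not the actual oslo.vmware code:

import time

class FakeTaskInfo:
    """Stand-in for the vim TaskInfo object; the fields are assumptions."""
    def __init__(self, state, progress=0, error=None):
        self.state = state            # 'running' | 'success' | 'error'
        self.progress = progress
        self.error = error

def wait_for_task(read_task_info, interval=0.5):
    """Block until the task succeeds or errors, as oslo_vmware.api does."""
    while True:
        info = read_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # oslo.vmware raises exceptions.translate_fault(task_info.error)
            # here; this sketch just raises a plain exception.
            raise RuntimeError("task failed: %s" % info.error)
        time.sleep(interval)          # the real loop runs in a green thread

# Usage: a task that errors on the second poll, mirroring task-4302886.
states = iter([FakeTaskInfo('running'),
               FakeTaskInfo('error', error='InvalidArgument: fileType')])
try:
    wait_for_task(lambda: next(states), interval=0)
except RuntimeError as exc:
    print(exc)                        # task failed: InvalidArgument: fileType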
[ 1204.930918] env[61855]: DEBUG nova.compute.claims [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1204.931111] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.931332] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.953890] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1205.160072] env[61855]: DEBUG oslo_vmware.rw_handles [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1205.225787] env[61855]: DEBUG oslo_vmware.rw_handles [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1205.226071] env[61855]: DEBUG oslo_vmware.rw_handles [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1205.459777] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9881128-5d8e-480f-aeb7-b3cb42395e67 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.467367] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5120993a-20e1-4b91-b462-f869b9882672 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.499298] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5b06e1-a059-4bf7-835a-5e873e620c4c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.505522] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d71f55-45f2-4584-a227-7a62fbe96fbb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.522171] env[61855]: DEBUG nova.compute.provider_tree [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.533919] env[61855]: DEBUG nova.scheduler.client.report [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1205.552552] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.621s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.553199] env[61855]: ERROR nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1205.553199] env[61855]: Faults: ['InvalidArgument'] [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Traceback (most recent call last): [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1205.553199] env[61855]: 
ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self.driver.spawn(context, instance, image_meta, [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self._fetch_image_if_missing(context, vi) [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] image_cache(vi, tmp_image_ds_loc) [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] vm_util.copy_virtual_disk( [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] session._wait_for_task(vmdk_copy_task) [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] return self.wait_for_task(task_ref) [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] return evt.wait() [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] result = hub.switch() [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] return self.greenlet.switch() [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] self.f(*self.args, **self.kw) [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] raise exceptions.translate_fault(task_info.error) [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Faults: ['InvalidArgument'] [ 1205.553199] env[61855]: ERROR nova.compute.manager [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] [ 1205.554525] env[61855]: DEBUG nova.compute.utils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1205.555843] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Build of instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 was re-scheduled: A specified parameter was not correct: fileType [ 1205.555843] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1205.556242] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1205.556419] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1205.556600] env[61855]: DEBUG nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1205.556763] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1206.154535] env[61855]: DEBUG nova.network.neutron [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.172018] env[61855]: INFO nova.compute.manager [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Took 0.61 seconds to deallocate network for instance. [ 1206.297377] env[61855]: INFO nova.scheduler.client.report [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Deleted allocations for instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 [ 1206.321039] env[61855]: DEBUG oslo_concurrency.lockutils [None req-68f1d8d0-effe-4083-89bc-fe6b7247335c tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 609.461s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.322270] env[61855]: DEBUG oslo_concurrency.lockutils [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 412.008s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.322493] env[61855]: DEBUG oslo_concurrency.lockutils [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Acquiring lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.322703] env[61855]: DEBUG oslo_concurrency.lockutils [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.323150] env[61855]: DEBUG oslo_concurrency.lockutils [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.325692] env[61855]: INFO nova.compute.manager [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Terminating instance [ 1206.330874] env[61855]: DEBUG nova.compute.manager [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1206.331097] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1206.331449] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e53b0464-e3cc-4a75-92a9-41409e643869 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.342346] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7864237d-eb73-486a-b73d-9ce2b3161c75 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.354609] env[61855]: DEBUG nova.compute.manager [None req-2a5b2582-45e6-49bc-9de2-18ddc691eb1f tempest-ServerActionsTestJSON-726919981 tempest-ServerActionsTestJSON-726919981-project-member] [instance: 02f2ee69-9ecf-4176-943e-06cdf255c92d] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1206.375780] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0519cba4-d9b1-4f54-b889-2c09d2d26b14 could not be found. [ 1206.376511] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1206.376511] env[61855]: INFO nova.compute.manager [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Took 0.05 seconds to destroy the instance on the hypervisor. 
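The lock records around the terminate path ('acquired ... waited 412.008s', 'released ... held 609.461s') come from the inner() wrapper that oslo.concurrency's lockutils places around every named lock, which timestamps both the wait for the lock and the time it was held. A small standard-library-only approximation of that bookkeeping — a sketch of the pattern, not the oslo code itself:

import threading
import time
from contextlib import contextmanager

_locks = {}   # name -> threading.Lock, like lockutils' internal registry

@contextmanager
def timed_lock(name, holder):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, holder, waited))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, holder, held))

# Usage: the critical section stands in for e.g. abort_instance_claim above.
with timed_lock("compute_resources", "abort_instance_claim"):
    time.sleep(0.01)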
[ 1206.376511] env[61855]: DEBUG oslo.service.loopingcall [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1206.376706] env[61855]: DEBUG nova.compute.manager [-] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1206.376806] env[61855]: DEBUG nova.network.neutron [-] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1206.382582] env[61855]: DEBUG nova.compute.manager [None req-2a5b2582-45e6-49bc-9de2-18ddc691eb1f tempest-ServerActionsTestJSON-726919981 tempest-ServerActionsTestJSON-726919981-project-member] [instance: 02f2ee69-9ecf-4176-943e-06cdf255c92d] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1206.421740] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2a5b2582-45e6-49bc-9de2-18ddc691eb1f tempest-ServerActionsTestJSON-726919981 tempest-ServerActionsTestJSON-726919981-project-member] Lock "02f2ee69-9ecf-4176-943e-06cdf255c92d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.590s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.432406] env[61855]: DEBUG nova.compute.manager [None req-09f01b8d-769d-40db-a4b3-1f09f5907ea8 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 54a8ce25-2900-44a4-9985-b70514fcc9d5] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1206.456350] env[61855]: DEBUG nova.compute.manager [None req-09f01b8d-769d-40db-a4b3-1f09f5907ea8 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] [instance: 54a8ce25-2900-44a4-9985-b70514fcc9d5] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1206.478596] env[61855]: DEBUG oslo_concurrency.lockutils [None req-09f01b8d-769d-40db-a4b3-1f09f5907ea8 tempest-VolumesAdminNegativeTest-1731413844 tempest-VolumesAdminNegativeTest-1731413844-project-member] Lock "54a8ce25-2900-44a4-9985-b70514fcc9d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.871s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.489324] env[61855]: DEBUG nova.compute.manager [None req-249c6bb1-eb3f-4fea-a632-1bd9ac3d0466 tempest-InstanceActionsNegativeTestJSON-237435190 tempest-InstanceActionsNegativeTestJSON-237435190-project-member] [instance: decbac6e-4d06-42ea-bc7b-9050ae0dba6b] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1206.516462] env[61855]: DEBUG nova.compute.manager [None req-249c6bb1-eb3f-4fea-a632-1bd9ac3d0466 tempest-InstanceActionsNegativeTestJSON-237435190 tempest-InstanceActionsNegativeTestJSON-237435190-project-member] [instance: decbac6e-4d06-42ea-bc7b-9050ae0dba6b] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1206.545613] env[61855]: DEBUG oslo_concurrency.lockutils [None req-249c6bb1-eb3f-4fea-a632-1bd9ac3d0466 tempest-InstanceActionsNegativeTestJSON-237435190 tempest-InstanceActionsNegativeTestJSON-237435190-project-member] Lock "decbac6e-4d06-42ea-bc7b-9050ae0dba6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 217.192s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.565201] env[61855]: DEBUG nova.network.neutron [-] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.566452] env[61855]: DEBUG nova.compute.manager [None req-be99f7f4-95ed-4188-8c3b-079573579a92 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 2dc6df74-b8c5-472f-9c02-d44a549e8aea] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1206.577444] env[61855]: INFO nova.compute.manager [-] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] Took 0.20 seconds to deallocate network for instance. [ 1206.597748] env[61855]: DEBUG nova.compute.manager [None req-be99f7f4-95ed-4188-8c3b-079573579a92 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 2dc6df74-b8c5-472f-9c02-d44a549e8aea] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1206.631242] env[61855]: DEBUG oslo_concurrency.lockutils [None req-be99f7f4-95ed-4188-8c3b-079573579a92 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "2dc6df74-b8c5-472f-9c02-d44a549e8aea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.917s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.644438] env[61855]: DEBUG nova.compute.manager [None req-b35e2170-4470-4975-8b60-8afd1663e390 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: f6e20ee2-94f3-4e24-a14d-1ba5eab45823] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1206.671458] env[61855]: DEBUG nova.compute.manager [None req-b35e2170-4470-4975-8b60-8afd1663e390 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: f6e20ee2-94f3-4e24-a14d-1ba5eab45823] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1206.690961] env[61855]: DEBUG oslo_concurrency.lockutils [None req-860b65c3-0bba-41a9-bb74-35d3e8ca6671 tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.367s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.690961] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 73.408s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.690961] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0519cba4-d9b1-4f54-b889-2c09d2d26b14] During sync_power_state the instance has a pending task (deleting). Skip. [ 1206.690961] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "0519cba4-d9b1-4f54-b889-2c09d2d26b14" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.708355] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b35e2170-4470-4975-8b60-8afd1663e390 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "f6e20ee2-94f3-4e24-a14d-1ba5eab45823" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 211.133s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.724801] env[61855]: DEBUG nova.compute.manager [None req-5d62fc02-c91b-49df-837d-acb51127fe2b tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: b38fa534-0d62-40ce-ae69-9275ffe839e7] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1206.751107] env[61855]: DEBUG nova.compute.manager [None req-5d62fc02-c91b-49df-837d-acb51127fe2b tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] [instance: b38fa534-0d62-40ce-ae69-9275ffe839e7] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1206.777794] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d62fc02-c91b-49df-837d-acb51127fe2b tempest-SecurityGroupsTestJSON-1165907180 tempest-SecurityGroupsTestJSON-1165907180-project-member] Lock "b38fa534-0d62-40ce-ae69-9275ffe839e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 209.835s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.788631] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1206.858369] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.858944] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.861763] env[61855]: INFO nova.compute.claims [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1207.310023] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdeebdf-ba68-440d-aa4a-2629c3dd84a4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.317659] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d331af1-dd06-4b3f-9f29-070a27316e26 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.356173] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba58627-77f6-4dc2-b2e9-882a3d64a6b9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.364321] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d6dc67-39a0-4204-981e-a0f06c9d5f27 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.379207] env[61855]: DEBUG nova.compute.provider_tree [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.391029] env[61855]: DEBUG nova.scheduler.client.report [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1207.407913] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.549s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.408496] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1207.460769] env[61855]: DEBUG nova.compute.utils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1207.465026] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1207.465239] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1207.478359] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1207.564080] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1207.598293] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1207.598540] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1207.598692] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1207.598871] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1207.599776] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1207.600040] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1207.600274] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1207.600441] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1207.600610] env[61855]: DEBUG 
nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1207.600773] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1207.600943] env[61855]: DEBUG nova.virt.hardware [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1207.603360] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db7cd4a-9300-4707-9ebe-6b0ebe17d89b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.608340] env[61855]: DEBUG nova.policy [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a01bd5c7b9304f7ca6b77aa8623cf467', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4611cc960de34328af78426d316c4227', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1207.616979] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b03690-fedd-4116-8aea-d20b3a8f4cb3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.219991] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Successfully created port: 2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1209.395237] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Successfully updated port: 2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1209.411338] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquiring lock "refresh_cache-6fbd649d-1fce-440f-9911-09b74df51489" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.411498] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 
tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquired lock "refresh_cache-6fbd649d-1fce-440f-9911-09b74df51489" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.411818] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1209.474225] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1209.499711] env[61855]: DEBUG nova.compute.manager [req-39a0f023-4cd5-4874-98ed-d9aed0c6e42b req-1c76c44a-ae28-4964-a243-a49c871c8385 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Received event network-vif-plugged-2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1209.500013] env[61855]: DEBUG oslo_concurrency.lockutils [req-39a0f023-4cd5-4874-98ed-d9aed0c6e42b req-1c76c44a-ae28-4964-a243-a49c871c8385 service nova] Acquiring lock "6fbd649d-1fce-440f-9911-09b74df51489-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.500203] env[61855]: DEBUG oslo_concurrency.lockutils [req-39a0f023-4cd5-4874-98ed-d9aed0c6e42b req-1c76c44a-ae28-4964-a243-a49c871c8385 service nova] Lock "6fbd649d-1fce-440f-9911-09b74df51489-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.500366] env[61855]: DEBUG oslo_concurrency.lockutils [req-39a0f023-4cd5-4874-98ed-d9aed0c6e42b req-1c76c44a-ae28-4964-a243-a49c871c8385 service nova] Lock "6fbd649d-1fce-440f-9911-09b74df51489-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.500530] env[61855]: DEBUG nova.compute.manager [req-39a0f023-4cd5-4874-98ed-d9aed0c6e42b req-1c76c44a-ae28-4964-a243-a49c871c8385 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] No waiting events found dispatching network-vif-plugged-2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1209.500689] env[61855]: WARNING nova.compute.manager [req-39a0f023-4cd5-4874-98ed-d9aed0c6e42b req-1c76c44a-ae28-4964-a243-a49c871c8385 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Received unexpected event network-vif-plugged-2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b for instance with vm_state building and task_state spawning. 
[ 1209.585115] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.585115] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.613396] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.613396] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.639171] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "07bb6cf5-b125-4e08-a7fc-9b1c358d7e17" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.639441] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "07bb6cf5-b125-4e08-a7fc-9b1c358d7e17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.721320] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Updating instance_info_cache with network_info: [{"id": "2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b", "address": "fa:16:3e:71:51:4a", "network": {"id": "adf44d57-3204-4e6f-b8b5-6ff0efd12ac9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-76313562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4611cc960de34328af78426d316c4227", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e3cb5-e3", "ovs_interfaceid": "2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.734799] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Releasing lock "refresh_cache-6fbd649d-1fce-440f-9911-09b74df51489" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.735127] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Instance network_info: |[{"id": "2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b", "address": "fa:16:3e:71:51:4a", "network": {"id": "adf44d57-3204-4e6f-b8b5-6ff0efd12ac9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-76313562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4611cc960de34328af78426d316c4227", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e3cb5-e3", "ovs_interfaceid": "2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1209.735593] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:51:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.742982] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Creating folder: Project (4611cc960de34328af78426d316c4227). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1209.743561] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f77c4c02-3204-4050-a7c3-83a9178ebed0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.755177] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Created folder: Project (4611cc960de34328af78426d316c4227) in parent group-v847048. [ 1209.755373] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Creating folder: Instances. Parent ref: group-v847119. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1209.755619] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-603a9d82-04f0-4399-bfa3-43ff5572e0b2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.764671] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Created folder: Instances in parent group-v847119. [ 1209.764895] env[61855]: DEBUG oslo.service.loopingcall [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1209.765085] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1209.765281] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05fcc428-1130-4b44-b91c-759ff3bebb41 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.783107] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.783107] env[61855]: value = "task-4302891" [ 1209.783107] env[61855]: _type = "Task" [ 1209.783107] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.791163] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302891, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.293427] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302891, 'name': CreateVM_Task, 'duration_secs': 0.433058} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.294173] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1210.305735] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.306045] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.306510] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1210.306894] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02ba0786-8a35-4fba-bfe9-560af18c362b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.312878] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Waiting for the task: (returnval){ [ 1210.312878] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]526bbf46-e452-1fc9-39b3-410fd53e82d4" [ 1210.312878] env[61855]: _type = "Task" [ 1210.312878] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.324370] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]526bbf46-e452-1fc9-39b3-410fd53e82d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.823635] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.823939] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1210.824513] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.524616] env[61855]: DEBUG nova.compute.manager [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Received event network-changed-2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1211.524869] env[61855]: DEBUG nova.compute.manager [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Refreshing instance network info cache due to event network-changed-2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1211.525232] env[61855]: DEBUG oslo_concurrency.lockutils [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] Acquiring lock "refresh_cache-6fbd649d-1fce-440f-9911-09b74df51489" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.525411] env[61855]: DEBUG oslo_concurrency.lockutils [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] Acquired lock "refresh_cache-6fbd649d-1fce-440f-9911-09b74df51489" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.525643] env[61855]: DEBUG nova.network.neutron [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Refreshing network info cache for port 2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1211.835513] env[61855]: DEBUG nova.network.neutron [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Updated VIF entry in instance network info cache for port 2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1211.835959] env[61855]: DEBUG nova.network.neutron [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Updating instance_info_cache with network_info: [{"id": "2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b", "address": "fa:16:3e:71:51:4a", "network": {"id": "adf44d57-3204-4e6f-b8b5-6ff0efd12ac9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-76313562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4611cc960de34328af78426d316c4227", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d6e3cb5-e3", "ovs_interfaceid": "2d6e3cb5-e3b6-4fc0-91a1-0ed6f074f02b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.848017] env[61855]: DEBUG oslo_concurrency.lockutils [req-ebf18b5e-62c5-4b0d-af5f-ed1731ea60c5 req-283d3182-de8c-4652-bdec-c4612703e8f5 service nova] Releasing lock "refresh_cache-6fbd649d-1fce-440f-9911-09b74df51489" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.228827] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2716f3c9-6e79-4628-b846-2f9f93420093 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "4bf2bcb4-fc3c-4051-9e38-128d65bd1c82" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.229354] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2716f3c9-6e79-4628-b846-2f9f93420093 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "4bf2bcb4-fc3c-4051-9e38-128d65bd1c82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.005443] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquiring lock "6fbd649d-1fce-440f-9911-09b74df51489" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.490721] env[61855]: DEBUG oslo_concurrency.lockutils [None req-118829e0-8405-4900-a386-900d0c015ee2 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Acquiring lock 
"2b747db5-efe7-431d-8d6c-58fa54e2c111" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.490721] env[61855]: DEBUG oslo_concurrency.lockutils [None req-118829e0-8405-4900-a386-900d0c015ee2 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "2b747db5-efe7-431d-8d6c-58fa54e2c111" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.920593] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.923412] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.923645] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1243.923824] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1243.967979] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.968195] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.968361] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.968497] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.968621] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.968743] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.968864] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.968988] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.969131] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.969253] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1243.969372] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1244.924414] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.924688] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.924806] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1245.924445] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.925086] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.919576] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.951057] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.962343] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.962343] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.962343] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.962343] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1246.962873] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6941e36-0fc4-46cd-85ab-838a86ce413e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.971830] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc90b1bf-ff3f-42bf-9c9e-14bca09eab51 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.986209] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e02c31-2a39-4e26-984d-9dc05cd01179 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.992676] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e200398-9218-4011-bc9b-6372836c6983 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.022704] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180570MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1247.022864] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.023074] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.099667] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.099909] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.100084] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.100272] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.100450] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.100610] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.100801] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.100982] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.101182] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.101717] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1247.113840] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 44f7b0f2-7585-4def-98d2-e34039db404c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.140043] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.149184] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.165138] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.177286] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.187682] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.198104] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6a976d89-a637-4bcc-83f3-fd509b5bad0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.207493] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b5ad73e6-2c48-41c4-85f9-3b9f74afb983 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.218007] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.239233] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.254604] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.277980] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 07bb6cf5-b125-4e08-a7fc-9b1c358d7e17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.290909] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bf2bcb4-fc3c-4051-9e38-128d65bd1c82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.314029] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2b747db5-efe7-431d-8d6c-58fa54e2c111 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
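The records that follow report the final resource view and the placement inventory for this node: per resource class, a total, a reserved amount, and an allocation_ratio. Placement's effective capacity is computed as int((total - reserved) * allocation_ratio); applying that formula to the inventory figures reported here (assuming nothing beyond the standard calculation):

    # Effective capacity per resource class, placement-style:
    #   capacity = int((total - reserved) * allocation_ratio)
    # Figures taken from the inventory records in this log.

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 210,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        cap = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(f"{rc}: capacity {cap}")
    # VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 210

This is why 10 allocated vCPUs against 48 physical ones is far from exhausting the node: with a 4.0 allocation ratio the schedulable VCPU capacity is 192.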
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1247.314029] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1247.314029] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1247.395486] env[61855]: DEBUG oslo_concurrency.lockutils [None req-4dff8e64-2c3c-424d-bbe2-4f2edd2b3d00 tempest-ServersNegativeTestMultiTenantJSON-1424545647 tempest-ServersNegativeTestMultiTenantJSON-1424545647-project-member] Acquiring lock "13d55fae-9923-4451-961a-f75ef39f7548" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.396415] env[61855]: DEBUG oslo_concurrency.lockutils [None req-4dff8e64-2c3c-424d-bbe2-4f2edd2b3d00 tempest-ServersNegativeTestMultiTenantJSON-1424545647 tempest-ServersNegativeTestMultiTenantJSON-1424545647-project-member] Lock "13d55fae-9923-4451-961a-f75ef39f7548" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.634837] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7d9db3-cb5d-45db-8ebd-938bb06fc165 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.642354] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbbf7ac-f2aa-4bea-bb9f-d8806bf26ad8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.672855] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e52ee5d-c5ec-4c5a-afb5-500d441be029 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.680048] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fe9dbf-815b-463f-a10e-366c6ddb7e1d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.694652] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.703766] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1247.723146] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1247.723318] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.700s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.703060] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.703352] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.809731] env[61855]: WARNING oslo_vmware.rw_handles [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1253.809731] env[61855]: ERROR oslo_vmware.rw_handles [ 1253.810371] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 
1253.811916] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1253.812177] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Copying Virtual Disk [datastore2] vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/d1f1c348-5c29-41cf-828d-d9994f4acb26/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1253.812459] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5fb90e9-eec3-40ad-908d-41dde9791dd4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.822074] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1253.822074] env[61855]: value = "task-4302892" [ 1253.822074] env[61855]: _type = "Task" [ 1253.822074] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.829847] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302892, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.332329] env[61855]: DEBUG oslo_vmware.exceptions [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Fault InvalidArgument not matched. 
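The "Fault InvalidArgument not matched" record just above is the fault-classification step: when a VMware task fails, the fault name is looked up against a table of specific exception classes, and a miss falls through to a generic fault exception that preserves the fault name. A sketch of that dispatch with hypothetical class names (not oslo.vmware's actual registry):

    # Hypothetical fault-name -> exception-class dispatch with a generic
    # fallback, in the spirit of the "Fault ... not matched" record above.

    class VimFaultError(Exception):
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list

    class DuplicateNameError(VimFaultError):
        pass

    _FAULT_REGISTRY = {
        "DuplicateName": DuplicateNameError,
    }

    def translate_fault(fault_name, msg):
        # Unrecognized fault names fall back to the generic class; the
        # original fault name is kept in fault_list for callers to inspect.
        cls = _FAULT_REGISTRY.get(fault_name, VimFaultError)
        return cls([fault_name], msg)

    err = translate_fault("InvalidArgument",
                          "A specified parameter was not correct: fileType")
    print(type(err).__name__, err.fault_list)  # VimFaultError ['InvalidArgument']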
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1254.332611] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.333191] env[61855]: ERROR nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1254.333191] env[61855]: Faults: ['InvalidArgument'] [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Traceback (most recent call last): [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] yield resources [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self.driver.spawn(context, instance, image_meta, [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self._fetch_image_if_missing(context, vi) [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] image_cache(vi, tmp_image_ds_loc) [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] vm_util.copy_virtual_disk( [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] session._wait_for_task(vmdk_copy_task) [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] return self.wait_for_task(task_ref) [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] return evt.wait() [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] result = hub.switch() [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] return self.greenlet.switch() [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self.f(*self.args, **self.kw) [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] raise exceptions.translate_fault(task_info.error) [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Faults: ['InvalidArgument'] [ 1254.333191] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] [ 1254.333939] env[61855]: INFO nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Terminating instance [ 1254.335194] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.335389] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.336062] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 
9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1254.336253] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1254.336484] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2368c3ec-bff8-447c-8b7e-2fa545eb06f6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.338982] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941f09f5-e6d9-4b5d-a7ba-7b9d74eee74b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.345446] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1254.345679] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3333baf4-669c-4656-b4bc-61dd13da09f0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.347995] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1254.348193] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1254.349219] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43232083-237c-4d81-a638-5ceb8b010bde {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.354044] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Waiting for the task: (returnval){ [ 1254.354044] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52ff678c-e882-7540-0ba4-e24806583da7" [ 1254.354044] env[61855]: _type = "Task" [ 1254.354044] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.361217] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52ff678c-e882-7540-0ba4-e24806583da7, 'name': SearchDatastore_Task} progress is 0%. 
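Paths like "[datastore2] devstack-image-cache_base" in the records above use vSphere's "[<datastore>] <relative path>" convention. A small hypothetical formatter/parser for that form (illustrative only, not nova's ds_util):

    import re

    def make_ds_path(datastore, *parts):
        return f"[{datastore}] " + "/".join(p.strip("/") for p in parts)

    def parse_ds_path(path):
        m = re.match(r"^\[(?P<ds>[^\]]+)\] (?P<rel>.*)$", path)
        if not m:
            raise ValueError(f"not a datastore path: {path!r}")
        return m.group("ds"), m.group("rel")

    p = make_ds_path("datastore2", "vmware_temp/4094db5e", "tmp-sparse.vmdk")
    print(p)                 # [datastore2] vmware_temp/4094db5e/tmp-sparse.vmdk
    print(parse_ds_path(p))  # ('datastore2', 'vmware_temp/4094db5e/tmp-sparse.vmdk')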
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.420633] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1254.420935] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1254.421175] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleting the datastore file [datastore2] 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1254.421391] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07d46f00-1438-4cf3-a3c4-ff44f80d8a0d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.427839] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1254.427839] env[61855]: value = "task-4302894" [ 1254.427839] env[61855]: _type = "Task" [ 1254.427839] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.436878] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302894, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.866414] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1254.866414] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Creating directory with path [datastore2] vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.866414] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad183ded-752c-4bac-9a1a-d76998054819 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.878024] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Created directory with path [datastore2] vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1254.878024] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Fetch image to [datastore2] vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1254.878024] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1254.878024] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86439a0-e247-4d72-b77c-7004f0b05f53 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.883970] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e06ac1d-a83c-4db8-b6f4-050acec6bb86 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.902124] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e172cdfe-e888-45ed-896f-0cf8c36d3b39 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.935550] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6ff7d8-b21f-4a39-b209-3efffc87e6ce {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.944109] env[61855]: DEBUG oslo_vmware.api [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066687} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.946303] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.946660] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1254.946955] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1254.947269] env[61855]: INFO nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Took 0.61 seconds to destroy the instance on the hypervisor. 
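The task records above ("progress is 0%", then "completed successfully" with a duration_secs) follow the usual poll-until-terminal pattern: re-read the task state on a fixed interval, surface progress while it runs, return the result on success, and raise on error. A generic sketch under a hypothetical task-info API (not oslo.vmware's actual interface):

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            print(f"progress is {info.get('progress', 0)}%")  # queued/running
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    # Usage with a canned sequence of states:
    states = iter([{"state": "running", "progress": 0},
                   {"state": "success", "result": "ok"}])
    print(wait_for_task(lambda: next(states), interval=0.01))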
[ 1254.949440] env[61855]: DEBUG nova.compute.claims [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1254.949734] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.953017] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.953017] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d2d49f7f-4389-4608-89d3-4d9c964d6fe8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.979028] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1255.149053] env[61855]: DEBUG oslo_vmware.rw_handles [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1255.207462] env[61855]: DEBUG oslo_vmware.rw_handles [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1255.207699] env[61855]: DEBUG oslo_vmware.rw_handles [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
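The rw_handles records above create an HTTPS write handle sized to the image (21318656 bytes here), stream the data through it, and then close the handle, which reads the server's response; closing is also where the earlier RemoteDisconnected WARNING surfaced. A generic http.client sketch of that shape (hypothetical helper; vSphere service tickets and real certificate validation are elided):

    import http.client
    import ssl

    def upload_stream(host, path, data_iter, size):
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE  # lab ESX hosts often self-sign
        conn = http.client.HTTPSConnection(host, 443, context=ctx)
        conn.putrequest("PUT", path)
        conn.putheader("Content-Length", str(size))
        conn.putheader("Content-Type", "application/octet-stream")
        conn.endheaders()
        for chunk in data_iter:
            conn.send(chunk)
        # getresponse() is where a server that drops the connection shows up
        # as http.client.RemoteDisconnected, as in the WARNING earlier.
        resp = conn.getresponse()
        resp.read()
        conn.close()
        return resp.status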
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1255.368991] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ad8e58-8549-4ed2-9784-b1b02263e8e5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.376657] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74138756-dc3e-4857-a047-fd1f93b4f07c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.407148] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c932dec-6055-456d-b59b-9d5a9df14b19 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.414064] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7242a1-33bb-42ae-a30e-2ecf5f1dfc4b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.426960] env[61855]: DEBUG nova.compute.provider_tree [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.436297] env[61855]: DEBUG nova.scheduler.client.report [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1255.452042] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.502s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.452593] env[61855]: ERROR nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1255.452593] env[61855]: Faults: ['InvalidArgument'] [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Traceback (most recent call last): [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 
9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self.driver.spawn(context, instance, image_meta, [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self._fetch_image_if_missing(context, vi) [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] image_cache(vi, tmp_image_ds_loc) [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] vm_util.copy_virtual_disk( [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] session._wait_for_task(vmdk_copy_task) [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] return self.wait_for_task(task_ref) [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] return evt.wait() [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] result = hub.switch() [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] return self.greenlet.switch() [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] self.f(*self.args, **self.kw) [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] raise exceptions.translate_fault(task_info.error) [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Faults: ['InvalidArgument'] [ 1255.452593] env[61855]: ERROR nova.compute.manager [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] [ 1255.453475] env[61855]: DEBUG nova.compute.utils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1255.454863] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Build of instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 was re-scheduled: A specified parameter was not correct: fileType [ 1255.454863] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1255.455278] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1255.455478] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1255.455655] env[61855]: DEBUG nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1255.455822] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1256.237046] env[61855]: DEBUG nova.network.neutron [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.258530] env[61855]: INFO nova.compute.manager [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Took 0.80 seconds to deallocate network for instance. [ 1256.399581] env[61855]: INFO nova.scheduler.client.report [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted allocations for instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 [ 1256.437622] env[61855]: DEBUG oslo_concurrency.lockutils [None req-fc47cc51-7362-4c07-90ee-2d41fca113fd tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 659.144s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.438660] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 461.706s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.438798] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.439656] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1256.440683] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.441802] env[61855]: INFO nova.compute.manager [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Terminating instance [ 1256.443796] env[61855]: DEBUG nova.compute.manager [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1256.444121] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1256.444722] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e63daf5-0d0c-4e6c-b603-d99c394e51c6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.455024] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1717a85-fbbd-4951-979b-1bf1677436c1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.468270] env[61855]: DEBUG nova.compute.manager [None req-83f8252d-ef66-4a0e-8cbd-10aa78d1186a tempest-ImagesNegativeTestJSON-297444898 tempest-ImagesNegativeTestJSON-297444898-project-member] [instance: c9608176-75bf-418f-b91d-79c6d997f543] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1256.492166] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c9d0334-be7e-466a-ab2c-cc3fbe82d756 could not be found. [ 1256.492399] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1256.492582] env[61855]: INFO nova.compute.manager [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Took 0.05 seconds to destroy the instance on the hypervisor. 
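The WARNING/"Instance destroyed" pair just above (the backend raises InstanceNotFound, yet teardown completes in 0.05 seconds) illustrates an idempotent destroy path: a VM that is already gone from the hypervisor, for example after the failed spawn was cleaned up earlier, is treated as already destroyed rather than as an error, so retries and terminate/reschedule races stay safe. A sketch with a hypothetical backend interface:

    class InstanceNotFound(Exception):
        pass

    def destroy(backend, instance_uuid):
        try:
            backend.unregister_vm(instance_uuid)
            backend.delete_files(instance_uuid)
        except InstanceNotFound:
            # Already gone on the backend: destroying is a no-op, not a failure.
            print(f"instance {instance_uuid} does not exist on backend; "
                  "treating destroy as complete")

    class GoneBackend:
        def unregister_vm(self, uuid):
            raise InstanceNotFound(uuid)

        def delete_files(self, uuid):
            pass

    destroy(GoneBackend(), "9c9d0334-be7e-466a-ab2c-cc3fbe82d756")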
[ 1256.492831] env[61855]: DEBUG oslo.service.loopingcall [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1256.493089] env[61855]: DEBUG nova.compute.manager [-] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1256.493166] env[61855]: DEBUG nova.network.neutron [-] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1256.500417] env[61855]: DEBUG nova.compute.manager [None req-83f8252d-ef66-4a0e-8cbd-10aa78d1186a tempest-ImagesNegativeTestJSON-297444898 tempest-ImagesNegativeTestJSON-297444898-project-member] [instance: c9608176-75bf-418f-b91d-79c6d997f543] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1256.520547] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83f8252d-ef66-4a0e-8cbd-10aa78d1186a tempest-ImagesNegativeTestJSON-297444898 tempest-ImagesNegativeTestJSON-297444898-project-member] Lock "c9608176-75bf-418f-b91d-79c6d997f543" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.876s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.529297] env[61855]: DEBUG nova.network.neutron [-] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.531326] env[61855]: DEBUG nova.compute.manager [None req-96727970-d7e7-4a20-80c3-04d142e6b0b3 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] [instance: 78755b45-1bb8-4a3b-9c51-7408425a561f] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1256.536368] env[61855]: INFO nova.compute.manager [-] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] Took 0.04 seconds to deallocate network for instance. [ 1256.556814] env[61855]: DEBUG nova.compute.manager [None req-96727970-d7e7-4a20-80c3-04d142e6b0b3 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] [instance: 78755b45-1bb8-4a3b-9c51-7408425a561f] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1256.583926] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96727970-d7e7-4a20-80c3-04d142e6b0b3 tempest-AttachVolumeShelveTestJSON-1356003505 tempest-AttachVolumeShelveTestJSON-1356003505-project-member] Lock "78755b45-1bb8-4a3b-9c51-7408425a561f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.568s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.621219] env[61855]: DEBUG nova.compute.manager [None req-6bae17bc-d807-49c0-8a15-4becd8143752 tempest-ServerActionsTestOtherA-91012384 tempest-ServerActionsTestOtherA-91012384-project-member] [instance: 0ff5c9d8-b95d-4127-95eb-fece90efe346] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1256.650236] env[61855]: DEBUG nova.compute.manager [None req-6bae17bc-d807-49c0-8a15-4becd8143752 tempest-ServerActionsTestOtherA-91012384 tempest-ServerActionsTestOtherA-91012384-project-member] [instance: 0ff5c9d8-b95d-4127-95eb-fece90efe346] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1256.653025] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14d577fc-2301-449c-8b01-4d6bd3895d80 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.653025] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 123.370s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.653025] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c9d0334-be7e-466a-ab2c-cc3fbe82d756] During sync_power_state the instance has a pending task (deleting). Skip. [ 1256.653025] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "9c9d0334-be7e-466a-ab2c-cc3fbe82d756" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.673295] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6bae17bc-d807-49c0-8a15-4becd8143752 tempest-ServerActionsTestOtherA-91012384 tempest-ServerActionsTestOtherA-91012384-project-member] Lock "0ff5c9d8-b95d-4127-95eb-fece90efe346" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.356s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.683464] env[61855]: DEBUG nova.compute.manager [None req-192cffde-4429-4c87-aecc-f433da36fff0 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: fe3646dd-a2ac-4ccb-9761-7c9b95be690e] Starting instance... 
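The lockutils records throughout this log attach two durations to every lock: the time the caller waited to acquire it (e.g. "waited 461.706s" for the terminate lock) and the time it was held (e.g. "held 659.144s" for the build lock). A minimal context-manager sketch of that accounting, illustrative only:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name, caller):
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    lk = threading.Lock()
    with timed_lock(lk, "compute_resources", "demo"):
        time.sleep(0.01)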
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1256.712123] env[61855]: DEBUG nova.compute.manager [None req-192cffde-4429-4c87-aecc-f433da36fff0 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: fe3646dd-a2ac-4ccb-9761-7c9b95be690e] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1256.738473] env[61855]: DEBUG oslo_concurrency.lockutils [None req-192cffde-4429-4c87-aecc-f433da36fff0 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "fe3646dd-a2ac-4ccb-9761-7c9b95be690e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.558s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.750083] env[61855]: DEBUG nova.compute.manager [None req-aed3b3bf-47cf-4333-b559-ef9e41e54c81 tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] [instance: 44f7b0f2-7585-4def-98d2-e34039db404c] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1256.780054] env[61855]: DEBUG nova.compute.manager [None req-aed3b3bf-47cf-4333-b559-ef9e41e54c81 tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] [instance: 44f7b0f2-7585-4def-98d2-e34039db404c] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1256.804822] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aed3b3bf-47cf-4333-b559-ef9e41e54c81 tempest-ServersTestMultiNic-29115040 tempest-ServersTestMultiNic-29115040-project-member] Lock "44f7b0f2-7585-4def-98d2-e34039db404c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 199.251s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.815050] env[61855]: DEBUG nova.compute.manager [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1256.898686] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.900097] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.900756] env[61855]: INFO nova.compute.claims [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1257.173537] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquiring lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.315322] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7925b174-b2e8-4412-ba44-5f92c4774753 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.323120] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9a67b5-5e36-4c28-867c-40ca5bcd2974 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.352132] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efe8d37-83af-4cdb-aaa6-bdcbaea8e033 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.358953] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be61892-8206-4484-a3ef-fcc73773c1ce {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.371609] env[61855]: DEBUG nova.compute.provider_tree [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.382120] env[61855]: DEBUG nova.scheduler.client.report [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1257.396533] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.497s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.396814] env[61855]: DEBUG nova.compute.manager [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1257.441691] env[61855]: DEBUG nova.compute.claims [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1257.441891] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.442166] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.752853] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de5cbbb-64c8-4a03-b77d-db2df5a484e6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.760331] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ea19c0-eb4a-498a-be76-9ca085c56559 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.791540] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d747a6c1-3cbc-49ae-ac73-a6693a28e3da {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.798925] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9b3087-9b08-49e9-8e6e-56d5c1c1f522 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.811833] env[61855]: DEBUG nova.compute.provider_tree 
[None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.820809] env[61855]: DEBUG nova.scheduler.client.report [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1257.834326] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.392s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.834976] env[61855]: DEBUG nova.compute.utils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Conflict updating instance 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1257.836411] env[61855]: DEBUG nova.compute.manager [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Instance disappeared during build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}} [ 1257.836591] env[61855]: DEBUG nova.compute.manager [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1257.836947] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquiring lock "refresh_cache-96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.836947] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquired lock "refresh_cache-96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.837120] env[61855]: DEBUG nova.network.neutron [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1257.862488] env[61855]: DEBUG nova.network.neutron [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1258.036520] env[61855]: DEBUG nova.network.neutron [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.050184] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Releasing lock "refresh_cache-96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.050184] env[61855]: DEBUG nova.compute.manager [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1258.050184] env[61855]: DEBUG nova.compute.manager [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1258.050404] env[61855]: DEBUG nova.network.neutron [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1258.067254] env[61855]: DEBUG nova.network.neutron [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1258.077323] env[61855]: DEBUG nova.network.neutron [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.087310] env[61855]: INFO nova.compute.manager [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Took 0.04 seconds to deallocate network for instance. 
[ 1258.174917] env[61855]: INFO nova.scheduler.client.report [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Deleted allocations for instance 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2 [ 1258.175259] env[61855]: DEBUG oslo_concurrency.lockutils [None req-e8f39da6-1e1c-4b19-9be0-f38d1f872e31 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.782s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.176921] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 1.004s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.179021] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquiring lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.179021] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.179021] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.179894] env[61855]: INFO nova.compute.manager [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Terminating instance [ 1258.181756] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquiring lock "refresh_cache-96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.181973] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Acquired lock 
"refresh_cache-96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.182111] env[61855]: DEBUG nova.network.neutron [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1258.187392] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1258.257706] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.258184] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.259722] env[61855]: INFO nova.compute.claims [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1258.369784] env[61855]: DEBUG nova.network.neutron [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1258.505814] env[61855]: DEBUG nova.network.neutron [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.516378] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Releasing lock "refresh_cache-96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.516798] env[61855]: DEBUG nova.compute.manager [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1258.517016] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1258.517560] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84103852-fe6b-43a1-a43b-338eb332e6e4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.526766] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d98b6b-77f3-474b-8399-8107af8f6547 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.558793] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2 could not be found. [ 1258.559034] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1258.559167] env[61855]: INFO nova.compute.manager [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1258.559443] env[61855]: DEBUG oslo.service.loopingcall [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1258.559665] env[61855]: DEBUG nova.compute.manager [-] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1258.559760] env[61855]: DEBUG nova.network.neutron [-] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1258.580616] env[61855]: DEBUG nova.network.neutron [-] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1258.588085] env[61855]: DEBUG nova.network.neutron [-] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.597276] env[61855]: INFO nova.compute.manager [-] [instance: 96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2] Took 0.04 seconds to deallocate network for instance. 
[ 1258.672636] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e12eaa-cfdc-49ec-8f39-460efde397a0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.682416] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99497bb8-967d-4e95-a516-e0968d1a87b3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.716417] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef65360-675e-4deb-ae05-790dcb05a935 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.719354] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6cb00792-7a1f-4d80-9925-6a1096031ee1 tempest-InstanceActionsTestJSON-1974717681 tempest-InstanceActionsTestJSON-1974717681-project-member] Lock "96e0e305-ba2b-4fda-bdac-3ff6ab7aaaf2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.542s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.725663] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411a08cc-9857-4f4f-a4c7-fea5d7cb1399 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.739953] env[61855]: DEBUG nova.compute.provider_tree [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.747917] env[61855]: DEBUG nova.scheduler.client.report [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1258.761402] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.503s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.761854] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1258.792140] env[61855]: DEBUG nova.compute.utils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1258.794018] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Not allocating networking since 'none' was specified. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1258.805020] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1258.872948] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1258.897577] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1258.897817] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1258.898058] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1258.898205] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1258.898357] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1258.898522] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1258.898766] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1258.898860] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1258.899050] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1258.899224] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1258.899418] env[61855]: DEBUG nova.virt.hardware [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1258.900299] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4092e8c-6f69-4de3-867c-cc105adf53a2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.907914] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe5774-0e88-44cd-9c2f-eb72c394d916 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.921729] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance VIF info [] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1258.927434] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 
tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Creating folder: Project (23eeae53d19c45caab73e501e5ceb8d9). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1258.927735] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7735d916-d94d-4bd5-b484-504384ebcb92 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.937834] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Created folder: Project (23eeae53d19c45caab73e501e5ceb8d9) in parent group-v847048. [ 1258.938047] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Creating folder: Instances. Parent ref: group-v847122. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1258.938275] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d033f66-14d4-4d2f-8459-fde7059d7a12 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.946552] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Created folder: Instances in parent group-v847122. [ 1258.946782] env[61855]: DEBUG oslo.service.loopingcall [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1258.946964] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1258.947179] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0af8ba36-5f43-4ea7-bcac-9d433dd4d9f6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.963244] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1258.963244] env[61855]: value = "task-4302897" [ 1258.963244] env[61855]: _type = "Task" [ 1258.963244] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.973274] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302897, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.478289] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302897, 'name': CreateVM_Task, 'duration_secs': 0.304794} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.478556] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1259.478800] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.478969] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.479735] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1259.480023] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da2c0276-561c-4a9d-b8c9-bc11bd10a9f1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.489022] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Waiting for the task: (returnval){ [ 1259.489022] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c762c2-3ed5-1a88-77b4-e6630af75b86" [ 1259.489022] env[61855]: _type = "Task" [ 1259.489022] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.498262] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c762c2-3ed5-1a88-77b4-e6630af75b86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.996895] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.997454] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1259.997875] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1262.514862] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.515567] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.541023] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "15d55991-fc11-45a0-ac77-1c00ce55dce7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.541856] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "15d55991-fc11-45a0-ac77-1c00ce55dce7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.045920] env[61855]: DEBUG oslo_concurrency.lockutils [None req-efb2a940-07fb-4b4f-9aad-593a2b7ab420 tempest-ServerShowV257Test-639370341 tempest-ServerShowV257Test-639370341-project-member] Acquiring lock "15388773-61c4-4ca4-9f61-aec7c22f4081" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.046238] env[61855]: DEBUG oslo_concurrency.lockutils [None req-efb2a940-07fb-4b4f-9aad-593a2b7ab420 tempest-ServerShowV257Test-639370341 tempest-ServerShowV257Test-639370341-project-member] Lock "15388773-61c4-4ca4-9f61-aec7c22f4081" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.919904] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.839844] env[61855]: WARNING oslo_vmware.rw_handles [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1304.839844] env[61855]: ERROR oslo_vmware.rw_handles [ 1304.840195] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1304.843681] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1304.843952] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 
tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Copying Virtual Disk [datastore2] vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/4094db5e-5805-407b-bbaf-5b9fc3afc910/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1304.844267] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a5e617e-f759-4582-9e47-2d36a6158f02 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.853966] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Waiting for the task: (returnval){ [ 1304.853966] env[61855]: value = "task-4302898" [ 1304.853966] env[61855]: _type = "Task" [ 1304.853966] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.861718] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Task: {'id': task-4302898, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.364395] env[61855]: DEBUG oslo_vmware.exceptions [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1305.364742] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.365257] env[61855]: ERROR nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1305.365257] env[61855]: Faults: ['InvalidArgument'] [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Traceback (most recent call last): [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] yield resources [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self.driver.spawn(context, instance, image_meta, [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self._fetch_image_if_missing(context, vi) [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] image_cache(vi, tmp_image_ds_loc) [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] vm_util.copy_virtual_disk( [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] session._wait_for_task(vmdk_copy_task) [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] return self.wait_for_task(task_ref) [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] return evt.wait() [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] result = hub.switch() [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] return self.greenlet.switch() [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self.f(*self.args, **self.kw) [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] raise exceptions.translate_fault(task_info.error) [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Faults: ['InvalidArgument'] [ 1305.365257] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] [ 1305.366173] env[61855]: INFO nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Terminating instance [ 1305.367248] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.367473] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1305.367710] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c0b5436-b9e3-4a2f-9290-e3d1c1bcd336 
{{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.369860] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1305.370067] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1305.370796] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1085fe2-8e83-4c4c-8934-649a69e95c84 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.377470] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1305.377673] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b584cc62-6c4d-4e75-a49a-fa10553ba3f4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.379795] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1305.379967] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1305.380938] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5a0e100-c747-47d6-ae34-3e9f16b4df64 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.385434] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Waiting for the task: (returnval){ [ 1305.385434] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]526731c5-e842-8a25-a63a-1be86b065646" [ 1305.385434] env[61855]: _type = "Task" [ 1305.385434] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.392327] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]526731c5-e842-8a25-a63a-1be86b065646, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.447812] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1305.448150] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1305.448417] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Deleting the datastore file [datastore2] 0d6f9828-e93a-474d-af31-f0ee6cb2149f {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1305.448759] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b21a7ba-69ec-4ee4-9054-4ecec4639764 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.455247] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Waiting for the task: (returnval){ [ 1305.455247] env[61855]: value = "task-4302900" [ 1305.455247] env[61855]: _type = "Task" [ 1305.455247] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.463092] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Task: {'id': task-4302900, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.896149] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1305.896493] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Creating directory with path [datastore2] vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1305.896805] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cdaf820-1909-454f-b0b7-bbc59a801d6a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.908769] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Created directory with path [datastore2] vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1305.908969] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Fetch image to [datastore2] vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1305.909157] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1305.909991] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7061c3a8-ad15-45f8-8c70-67e6201e3575 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.916598] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5c0791-7cd9-4f58-91a4-5b9ecbc5103f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.925356] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.925509] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1305.925635] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1305.928215] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb71ca3-28e7-4770-a84e-d7914ae63613 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.967992] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.968224] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.968291] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.968419] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.968543] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.968665] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.968785] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.968925] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.969058] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.969178] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1305.969296] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1305.970009] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1279cd0b-bd9c-4b72-8412-075c5eb281c7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.972825] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.972975] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1305.978167] env[61855]: DEBUG oslo_vmware.api [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Task: {'id': task-4302900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07462} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.979662] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1305.979856] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1305.980045] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1305.980224] env[61855]: INFO nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1305.982408] env[61855]: DEBUG nova.compute.claims [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1305.982582] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.982797] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.985473] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d01cbfc9-d1e3-45e9-9899-ee9e5ae6b4d7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.011652] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1306.163678] env[61855]: DEBUG oslo_vmware.rw_handles [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1306.223292] env[61855]: DEBUG oslo_vmware.rw_handles [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1306.223514] env[61855]: DEBUG oslo_vmware.rw_handles [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1306.393158] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e548b585-90c2-46b0-b672-950c59729dd1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.400776] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8772f37b-b14a-4585-8124-b6cca2c05547 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.430990] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfeffd79-9f9f-433f-bd27-c62fa897673f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.437861] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d45a0e6-cecc-416a-a4ce-4878aa56076a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.450707] env[61855]: DEBUG nova.compute.provider_tree [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1306.460704] env[61855]: DEBUG nova.scheduler.client.report [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1306.478533] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.496s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.479054] env[61855]: ERROR nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1306.479054] env[61855]: Faults: ['InvalidArgument'] [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Traceback (most recent call last): [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, 
in _build_and_run_instance [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self.driver.spawn(context, instance, image_meta, [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self._fetch_image_if_missing(context, vi) [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] image_cache(vi, tmp_image_ds_loc) [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] vm_util.copy_virtual_disk( [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] session._wait_for_task(vmdk_copy_task) [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] return self.wait_for_task(task_ref) [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] return evt.wait() [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] result = hub.switch() [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] return self.greenlet.switch() [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] self.f(*self.args, **self.kw) [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 
0d6f9828-e93a-474d-af31-f0ee6cb2149f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] raise exceptions.translate_fault(task_info.error) [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Faults: ['InvalidArgument'] [ 1306.479054] env[61855]: ERROR nova.compute.manager [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] [ 1306.479872] env[61855]: DEBUG nova.compute.utils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1306.481423] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Build of instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f was re-scheduled: A specified parameter was not correct: fileType [ 1306.481423] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1306.481804] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1306.481997] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1306.482183] env[61855]: DEBUG nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1306.482348] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1306.879032] env[61855]: DEBUG nova.network.neutron [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.901128] env[61855]: INFO nova.compute.manager [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Took 0.42 seconds to deallocate network for instance. [ 1306.926410] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.926659] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.006461] env[61855]: INFO nova.scheduler.client.report [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Deleted allocations for instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f [ 1307.027538] env[61855]: DEBUG oslo_concurrency.lockutils [None req-71ead566-b556-46bd-a81c-96c8c01d85c8 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 685.707s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.028802] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 486.733s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.029158] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72
tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Acquiring lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.029418] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.029604] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.031895] env[61855]: INFO nova.compute.manager [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Terminating instance [ 1307.033630] env[61855]: DEBUG nova.compute.manager [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1307.033853] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1307.034657] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe61caef-7be7-48b7-9d59-e77510affd79 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.041855] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Starting instance...
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1307.048833] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4a1ec5-be26-4a10-9521-712d17bf735e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.079719] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d6f9828-e93a-474d-af31-f0ee6cb2149f could not be found. [ 1307.079719] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1307.079719] env[61855]: INFO nova.compute.manager [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1307.080108] env[61855]: DEBUG oslo.service.loopingcall [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.082423] env[61855]: DEBUG nova.compute.manager [-] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1307.082564] env[61855]: DEBUG nova.network.neutron [-] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1307.097072] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.097421] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.099044] env[61855]: INFO nova.compute.claims [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1307.121884] env[61855]: DEBUG nova.network.neutron [-] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.139036] env[61855]: INFO nova.compute.manager [-] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] Took 0.06 seconds to deallocate network for instance. [ 1307.233291] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ac277672-9f69-4ab2-b6f0-8fc9b915bc72 tempest-ServerMetadataNegativeTestJSON-580405815 tempest-ServerMetadataNegativeTestJSON-580405815-project-member] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.204s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.234165] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 173.951s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.234959] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0d6f9828-e93a-474d-af31-f0ee6cb2149f] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1307.234959] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "0d6f9828-e93a-474d-af31-f0ee6cb2149f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.437566] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da32a1c-6b98-4820-8c56-b996fa9fd460 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.445252] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5ce76c-2bc1-499f-bbcc-9aedfd7c7339 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.474267] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6db4bb-9db7-4a3b-9146-3c699aacff22 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.481044] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c0c718-a6f3-4f34-833c-3aaa4c79add8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.493748] env[61855]: DEBUG nova.compute.provider_tree [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1307.502538] env[61855]: DEBUG nova.scheduler.client.report [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1307.516696] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.419s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.517219] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Start building networks asynchronously for instance.
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1307.558058] env[61855]: DEBUG nova.compute.utils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1307.559796] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1307.559971] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1307.572929] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1307.615009] env[61855]: DEBUG nova.policy [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feb42cbe701d4c1fb8a3e64cecc45c5c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f6c53d9d7314626ae755a7fbcc6f479', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1307.634358] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1307.658628] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1307.658893] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1307.659074] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1307.659263] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1307.659415] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1307.659564] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1307.659773] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1307.659939] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1307.660205] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1307.660404] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1307.660587] env[61855]: DEBUG nova.virt.hardware [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1307.661553] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abeadb6-dc32-4b83-9515-ea61b76569b7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.669480] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285d86c1-523c-466f-8f42-5f3d6341e83e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.925562] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.951589] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Successfully created port: 71a93b24-eee9-43a5-aacc-2efb7c2ee656 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1308.846581] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Successfully updated port: 71a93b24-eee9-43a5-aacc-2efb7c2ee656 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1308.860656] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "refresh_cache-accbccfe-3858-4a4c-b47b-3f12976c8c20" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.862721] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquired lock "refresh_cache-accbccfe-3858-4a4c-b47b-3f12976c8c20" {{(pid=61855) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.862916] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1308.924582] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1308.934708] env[61855]: DEBUG nova.compute.manager [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Received event network-vif-plugged-71a93b24-eee9-43a5-aacc-2efb7c2ee656 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1308.934939] env[61855]: DEBUG oslo_concurrency.lockutils [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] Acquiring lock "accbccfe-3858-4a4c-b47b-3f12976c8c20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.935166] env[61855]: DEBUG oslo_concurrency.lockutils [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] Lock "accbccfe-3858-4a4c-b47b-3f12976c8c20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.935339] env[61855]: DEBUG oslo_concurrency.lockutils [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] Lock "accbccfe-3858-4a4c-b47b-3f12976c8c20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1308.935510] env[61855]: DEBUG nova.compute.manager [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] No waiting events found dispatching network-vif-plugged-71a93b24-eee9-43a5-aacc-2efb7c2ee656 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1308.935676] env[61855]: WARNING nova.compute.manager [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Received unexpected event network-vif-plugged-71a93b24-eee9-43a5-aacc-2efb7c2ee656 for instance with vm_state building and task_state spawning.
[ 1308.935878] env[61855]: DEBUG nova.compute.manager [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Received event network-changed-71a93b24-eee9-43a5-aacc-2efb7c2ee656 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1308.937422] env[61855]: DEBUG nova.compute.manager [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Refreshing instance network info cache due to event network-changed-71a93b24-eee9-43a5-aacc-2efb7c2ee656. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1308.937542] env[61855]: DEBUG oslo_concurrency.lockutils [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] Acquiring lock "refresh_cache-accbccfe-3858-4a4c-b47b-3f12976c8c20" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.938899] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1308.941578] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.941778] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.941946] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1308.942110] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1308.943163] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71b6138-6ddb-4ea8-b976-05d0a0aa85bb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.952572] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0888a63b-37a9-4e7f-b4bb-fe279e5131bd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.966746] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccb52f7-02db-4b77-af88-4112a25dc865 {{(pid=61855) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.974800] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102f50eb-13cb-4d49-b584-11f53f0b7e3e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.004968] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180673MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1309.004968] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.005118] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.081655] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.081655] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.081792] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.081792] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.081912] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.082050] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.082171] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.082288] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.082403] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.082520] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1309.097767] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.114564] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.125864] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6a976d89-a637-4bcc-83f3-fd509b5bad0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.132749] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Updating instance_info_cache with network_info: [{"id": "71a93b24-eee9-43a5-aacc-2efb7c2ee656", "address": "fa:16:3e:ad:bf:97", "network": {"id": "7d6b7eec-6dc3-418e-aef8-9566635cdcc5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-885395669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2f6c53d9d7314626ae755a7fbcc6f479", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a93b24-ee", "ovs_interfaceid": "71a93b24-eee9-43a5-aacc-2efb7c2ee656", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.135675] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b5ad73e6-2c48-41c4-85f9-3b9f74afb983 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.146707] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.148072] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Releasing lock "refresh_cache-accbccfe-3858-4a4c-b47b-3f12976c8c20" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.148348] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Instance network_info: |[{"id": "71a93b24-eee9-43a5-aacc-2efb7c2ee656", "address": "fa:16:3e:ad:bf:97", "network": {"id": "7d6b7eec-6dc3-418e-aef8-9566635cdcc5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-885395669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2f6c53d9d7314626ae755a7fbcc6f479", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a93b24-ee", "ovs_interfaceid": "71a93b24-eee9-43a5-aacc-2efb7c2ee656", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1309.148634] env[61855]: DEBUG oslo_concurrency.lockutils [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] Acquired lock "refresh_cache-accbccfe-3858-4a4c-b47b-3f12976c8c20" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.148821] env[61855]: DEBUG nova.network.neutron [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Refreshing network info cache for port 71a93b24-eee9-43a5-aacc-2efb7c2ee656 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1309.149790] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:bf:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab0428e-1be7-475e-80e3-1f0aa08d4f86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71a93b24-eee9-43a5-aacc-2efb7c2ee656', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1309.157247] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 
tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Creating folder: Project (2f6c53d9d7314626ae755a7fbcc6f479). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1309.160442] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c293aa2f-10a0-44b8-b382-51bfbf0a1281 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.162694] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.172631] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.177220] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Created folder: Project (2f6c53d9d7314626ae755a7fbcc6f479) in parent group-v847048. [ 1309.177584] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Creating folder: Instances. Parent ref: group-v847125. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1309.177660] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5803e6a-24f1-4fbf-874e-d82431de37e9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.183376] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 07bb6cf5-b125-4e08-a7fc-9b1c358d7e17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.186744] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Created folder: Instances in parent group-v847125. [ 1309.186982] env[61855]: DEBUG oslo.service.loopingcall [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.187387] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1309.187607] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c10f9721-780a-4ac5-8d02-15e672566268 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.204621] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bf2bcb4-fc3c-4051-9e38-128d65bd1c82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.213026] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1309.213026] env[61855]: value = "task-4302903" [ 1309.213026] env[61855]: _type = "Task" [ 1309.213026] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.216219] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2b747db5-efe7-431d-8d6c-58fa54e2c111 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.220606] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302903, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.228234] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 13d55fae-9923-4451-961a-f75ef39f7548 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.245371] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.259288] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 15d55991-fc11-45a0-ac77-1c00ce55dce7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.279795] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 15388773-61c4-4ca4-9f61-aec7c22f4081 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1309.280104] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1309.280278] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1309.494742] env[61855]: DEBUG nova.network.neutron [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Updated VIF entry in instance network info cache for port 71a93b24-eee9-43a5-aacc-2efb7c2ee656. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1309.495141] env[61855]: DEBUG nova.network.neutron [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Updating instance_info_cache with network_info: [{"id": "71a93b24-eee9-43a5-aacc-2efb7c2ee656", "address": "fa:16:3e:ad:bf:97", "network": {"id": "7d6b7eec-6dc3-418e-aef8-9566635cdcc5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-885395669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2f6c53d9d7314626ae755a7fbcc6f479", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a93b24-ee", "ovs_interfaceid": "71a93b24-eee9-43a5-aacc-2efb7c2ee656", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.506628] env[61855]: DEBUG oslo_concurrency.lockutils [req-b41577c6-e5f8-4d20-bd0d-eb00cc0a65bc req-19999487-9489-494f-a6a8-46d9df35d978 service nova] Releasing lock "refresh_cache-accbccfe-3858-4a4c-b47b-3f12976c8c20" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.591181] env[61855]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d41b71-e776-4c41-8ece-750db9a0277d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.598552] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f607000f-5e72-47b3-816b-e398b0baca70 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.628833] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792ab8a9-86e9-4740-8fbb-834413bd5ea8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.636012] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca81300-df63-4892-bd36-f507d20fdef5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.649680] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1309.658333] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1309.673511] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1309.673511] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.668s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.722958] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302903, 'name': CreateVM_Task, 'duration_secs': 0.294954} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.722958] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1309.722958] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.722958] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.722958] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1309.722958] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dae4244-e8dc-46ec-9bd5-127978cdd326 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.727360] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Waiting for the task: (returnval){ [ 1309.727360] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a3be71-36b3-2059-161c-2592ea97955d" [ 1309.727360] env[61855]: _type = "Task" [ 1309.727360] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.735458] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a3be71-36b3-2059-161c-2592ea97955d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.238377] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.238680] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1310.238840] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.673112] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.924581] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.561327] env[61855]: WARNING oslo_vmware.rw_handles [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1352.561327] env[61855]: ERROR oslo_vmware.rw_handles [ 1352.561986] env[61855]: DEBUG 
nova.virt.vmwareapi.images [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1352.563997] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1352.564279] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Copying Virtual Disk [datastore2] vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/c147cf57-004f-4e94-ad84-524ec02c9642/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1352.564596] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8eb79a89-7578-490e-afbb-0e9e0bde0bb7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.573536] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Waiting for the task: (returnval){ [ 1352.573536] env[61855]: value = "task-4302904" [ 1352.573536] env[61855]: _type = "Task" [ 1352.573536] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.581859] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Task: {'id': task-4302904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.084495] env[61855]: DEBUG oslo_vmware.exceptions [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1353.084771] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.085379] env[61855]: ERROR nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1353.085379] env[61855]: Faults: ['InvalidArgument'] [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Traceback (most recent call last): [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] yield resources [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self.driver.spawn(context, instance, image_meta, [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self._fetch_image_if_missing(context, vi) [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] image_cache(vi, tmp_image_ds_loc) [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] vm_util.copy_virtual_disk( [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] session._wait_for_task(vmdk_copy_task) [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] return self.wait_for_task(task_ref) [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] return evt.wait() [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] result = hub.switch() [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] return self.greenlet.switch() [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self.f(*self.args, **self.kw) [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] raise exceptions.translate_fault(task_info.error) [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Faults: ['InvalidArgument'] [ 1353.085379] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] [ 1353.086311] env[61855]: INFO nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Terminating instance [ 1353.087389] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.087608] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1353.087851] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a2ae2ea-b6c5-430b-8ca0-fcb9b770fe3a {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.090121] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1353.090323] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1353.091066] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc4fe02-67b7-4de2-8684-55ed1d2efff6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.099220] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1353.099435] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ff25ea8-9898-408b-bb86-248a2e02ea4f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.101662] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1353.101834] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1353.102792] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ecdb32c-640a-478d-96d0-c3a8597e2621 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.107430] env[61855]: DEBUG oslo_vmware.api [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for the task: (returnval){ [ 1353.107430] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52225513-c403-9a1f-47f8-85d689054e32" [ 1353.107430] env[61855]: _type = "Task" [ 1353.107430] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.114924] env[61855]: DEBUG oslo_vmware.api [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52225513-c403-9a1f-47f8-85d689054e32, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.171191] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1353.171440] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1353.171576] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Deleting the datastore file [datastore2] 29ae12b6-adc9-4a25-8a89-9a88470b3818 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.171849] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ebb7815-5b7f-478a-90d2-c0d775a0de11 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.178983] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Waiting for the task: (returnval){ [ 1353.178983] env[61855]: value = "task-4302906" [ 1353.178983] env[61855]: _type = "Task" [ 1353.178983] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.187574] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Task: {'id': task-4302906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.616996] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1353.617333] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Creating directory with path [datastore2] vmware_temp/9ff7f72a-f7bb-4f57-9df5-171013247f2b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1353.617640] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d195efdc-ba2c-4ac9-950d-9c6707f0f850 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.629536] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Created directory with path [datastore2] vmware_temp/9ff7f72a-f7bb-4f57-9df5-171013247f2b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1353.629737] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Fetch image to [datastore2] vmware_temp/9ff7f72a-f7bb-4f57-9df5-171013247f2b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1353.629913] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/9ff7f72a-f7bb-4f57-9df5-171013247f2b/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1353.630727] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2446003f-7ce4-48e6-881c-55d849d80b70 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.636974] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085be67c-ed96-4237-ad6a-f6cacbc0a5fc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.645828] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ca052d-ec61-4c3c-a7cf-4a2d746fd6a3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.675383] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dccee4e-2e28-4a51-b08d-cbb0e9e40edd {{(pid=61855) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.682837] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5d35d5ff-9143-4e65-adf2-443a9190f347 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.688884] env[61855]: DEBUG oslo_vmware.api [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Task: {'id': task-4302906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077026} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.689140] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.689319] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1353.689514] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1353.689692] env[61855]: INFO nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Took 0.60 seconds to destroy the instance on the hypervisor. 
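The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is 0%", and "completed successfully ... duration_secs" lines in this section come from oslo_vmware.api's wait_for_task/_poll_task loop. Here is a self-contained sketch of that polling shape; FakeTask, its states, and the timings are assumptions for illustration, not oslo.vmware's implementation.

    # Minimal, self-contained sketch of the task-polling pattern in the log:
    # poll task state, log progress while running, return on success, raise on error.
    import itertools
    import time

    class FakeTask:
        """Simulates a vCenter task that finishes after a few polls (assumed shape)."""
        def __init__(self):
            self._progress = itertools.chain([0, 40, 80], itertools.repeat(100))

        def poll(self):
            p = next(self._progress)
            return ("success" if p == 100 else "running"), p

    def wait_for_task(task, interval=0.1):
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            if state == "running":
                print(f"Task progress is {progress}%.")
            elif state == "success":
                return time.monotonic() - start  # analogous to duration_secs
            else:
                raise RuntimeError("task error")  # oslo.vmware raises a translated fault here
            time.sleep(interval)

    print(f"duration_secs: {wait_for_task(FakeTask()):.3f}")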
[ 1353.691749] env[61855]: DEBUG nova.compute.claims [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1353.691920] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.692154] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.702486] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1353.869271] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.870474] env[61855]: ERROR nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. 
[ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = getattr(controller, method)(*args, **kwargs) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._get(image_id) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] resp, body = self.http_client.get(url, headers=header) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.request(url, 'GET', **kwargs) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._handle_response(resp) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exc.from_response(resp, resp.content) [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During handling of the above exception, another exception occurred: [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] yield resources [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self.driver.spawn(context, instance, image_meta, [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1353.870474] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._fetch_image_if_missing(context, vi) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image_fetch(context, vi, tmp_image_ds_loc) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] images.fetch_image( [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] metadata = IMAGE_API.get(context, image_ref) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return session.show(context, image_id, [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] _reraise_translated_image_exception(image_id) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise new_exc.with_traceback(exc_trace) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = getattr(controller, method)(*args, **kwargs) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._get(image_id) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] resp, body = self.http_client.get(url, headers=header) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.request(url, 'GET', **kwargs) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._handle_response(resp) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exc.from_response(resp, resp.content) [ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] nova.exception.ImageNotAuthorized: Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. 
[ 1353.871457] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1353.871457] env[61855]: INFO nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Terminating instance [ 1353.872823] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.873057] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1353.873950] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1353.874165] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1353.874402] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e1c4400-aeaf-4502-9528-d26e5c942e53 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.876853] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f6de3f-dd63-4262-8366-b29e1d2e7d83 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.887581] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1353.887811] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25f9b26d-aa9b-4190-873b-9e45574aa800 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.890043] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1353.890217] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 
tempest-AttachInterfacesV270Test-997216403-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1353.891176] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f90b28b-cf85-4032-b19d-3d3d3dfe1d86 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.898636] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Waiting for the task: (returnval){ [ 1353.898636] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52652245-4192-c151-7de7-ee0c560e5d56" [ 1353.898636] env[61855]: _type = "Task" [ 1353.898636] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.906069] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52652245-4192-c151-7de7-ee0c560e5d56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.957477] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1353.957772] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1353.957999] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Deleting the datastore file [datastore2] 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.958333] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75f51bde-0659-447f-8afc-d876852d175e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.966259] env[61855]: DEBUG oslo_vmware.api [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for the task: (returnval){ [ 1353.966259] env[61855]: value = "task-4302908" [ 1353.966259] env[61855]: _type = "Task" [ 1353.966259] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.976373] env[61855]: DEBUG oslo_vmware.api [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': task-4302908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.036368] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e614843-1ce2-4186-890a-17b36495b696 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.043856] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d53e8a7-5145-481e-8f4c-3d8643cd5d85 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.073290] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e5a84f-05fe-4a71-bc43-0f36b0fc88f0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.080125] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9037c8-9c51-4f05-94f6-003dc07193b4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.092922] env[61855]: DEBUG nova.compute.provider_tree [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.105035] env[61855]: DEBUG nova.scheduler.client.report [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1354.119394] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.427s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.119937] env[61855]: ERROR nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1354.119937] env[61855]: Faults: ['InvalidArgument'] [ 1354.119937] 
env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Traceback (most recent call last): [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self.driver.spawn(context, instance, image_meta, [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self._fetch_image_if_missing(context, vi) [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] image_cache(vi, tmp_image_ds_loc) [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] vm_util.copy_virtual_disk( [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] session._wait_for_task(vmdk_copy_task) [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] return self.wait_for_task(task_ref) [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] return evt.wait() [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] result = hub.switch() [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] return self.greenlet.switch() [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] self.f(*self.args, **self.kw) [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] raise exceptions.translate_fault(task_info.error) [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Faults: ['InvalidArgument'] [ 1354.119937] env[61855]: ERROR nova.compute.manager [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] [ 1354.120896] env[61855]: DEBUG nova.compute.utils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1354.122043] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Build of instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 was re-scheduled: A specified parameter was not correct: fileType [ 1354.122043] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1354.122411] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1354.122586] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1354.122758] env[61855]: DEBUG nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1354.122922] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1354.410240] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1354.410240] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Creating directory with path [datastore2] vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1354.410470] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1152689-e1dc-4421-ab03-4c8052e89029 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.424250] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Created directory with path [datastore2] vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1354.424452] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Fetch image to [datastore2] vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1354.424619] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1354.425454] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4a02c8-881a-4f72-8c28-d4fca098f84f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1354.433887] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10422e19-4dda-44d6-a1a6-1e8ffe7a89ee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.443031] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d76a721-3e16-4408-8067-8689a18e0e73 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.482406] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2223e065-caaa-400e-8825-1a3d5053855c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.491053] env[61855]: DEBUG oslo_vmware.api [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Task: {'id': task-4302908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319822} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.492766] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1354.492894] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1354.493150] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1354.494728] env[61855]: INFO nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1354.497682] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d23884a7-9d71-4d56-93aa-906ee45c2ec2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.497682] env[61855]: DEBUG nova.compute.claims [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1354.497682] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.497682] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.514504] env[61855]: DEBUG nova.network.neutron [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.521382] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1354.564631] env[61855]: INFO nova.compute.manager [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Took 0.44 seconds to deallocate network for instance. [ 1354.600218] env[61855]: DEBUG oslo_vmware.rw_handles [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1354.666892] env[61855]: DEBUG oslo_vmware.rw_handles [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Completed reading data from the image iterator. 
{{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1354.667178] env[61855]: DEBUG oslo_vmware.rw_handles [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1354.721574] env[61855]: INFO nova.scheduler.client.report [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Deleted allocations for instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 [ 1354.742405] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6289b13-52fc-4531-bca3-a5088bbd8c0c tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 677.678s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.744339] env[61855]: DEBUG oslo_concurrency.lockutils [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 482.062s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.744618] env[61855]: DEBUG oslo_concurrency.lockutils [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Acquiring lock "29ae12b6-adc9-4a25-8a89-9a88470b3818-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.744849] env[61855]: DEBUG oslo_concurrency.lockutils [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.745034] env[61855]: DEBUG oslo_concurrency.lockutils [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.746883] env[61855]: INFO nova.compute.manager [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Terminating instance [ 1354.748657] env[61855]: DEBUG nova.compute.manager [None 
req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1354.748794] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1354.751509] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21d8622f-bc1d-4f0d-b6c7-d4087b45cddb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.760368] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2435adfe-2a70-4631-9935-5f0e1870eaff {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.774572] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1354.794846] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29ae12b6-adc9-4a25-8a89-9a88470b3818 could not be found. [ 1354.795036] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1354.795222] env[61855]: INFO nova.compute.manager [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1354.795512] env[61855]: DEBUG oslo.service.loopingcall [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.797987] env[61855]: DEBUG nova.compute.manager [-] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1354.798116] env[61855]: DEBUG nova.network.neutron [-] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1354.824884] env[61855]: DEBUG nova.network.neutron [-] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.826893] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.835602] env[61855]: INFO nova.compute.manager [-] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] Took 0.04 seconds to deallocate network for instance. [ 1354.930401] env[61855]: DEBUG oslo_concurrency.lockutils [None req-494db3e3-9a8d-4ef6-ba66-c59ac7e75672 tempest-ServerTagsTestJSON-1229943114 tempest-ServerTagsTestJSON-1229943114-project-member] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.931310] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 221.648s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.931506] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 29ae12b6-adc9-4a25-8a89-9a88470b3818] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1354.931694] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "29ae12b6-adc9-4a25-8a89-9a88470b3818" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.958505] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfeaeb08-4049-4c6e-95ea-3f0cb51b3ab7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.966327] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2c54bd-affe-4b0b-a6f2-8ab211df22cd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.995508] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c891818-15b7-4e12-bcd4-2d8117409aa5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.002224] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5a02d3-6b78-4e0c-a023-8ea5fb3931db {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.019303] env[61855]: DEBUG nova.compute.provider_tree [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1355.029122] env[61855]: DEBUG nova.scheduler.client.report [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1355.057201] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.559s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.058674] env[61855]: ERROR nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. 
[ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = getattr(controller, method)(*args, **kwargs) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._get(image_id) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] resp, body = self.http_client.get(url, headers=header) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.request(url, 'GET', **kwargs) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._handle_response(resp) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exc.from_response(resp, resp.content) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During handling of the above exception, another exception occurred: [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self.driver.spawn(context, instance, image_meta, [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._fetch_image_if_missing(context, vi) [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1355.058674] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image_fetch(context, vi, tmp_image_ds_loc) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] images.fetch_image( [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] metadata = IMAGE_API.get(context, image_ref) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return session.show(context, image_id, [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] _reraise_translated_image_exception(image_id) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise new_exc.with_traceback(exc_trace) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 
9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = getattr(controller, method)(*args, **kwargs) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._get(image_id) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] resp, body = self.http_client.get(url, headers=header) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.request(url, 'GET', **kwargs) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._handle_response(resp) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exc.from_response(resp, resp.content) [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] nova.exception.ImageNotAuthorized: Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. [ 1355.059597] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.060411] env[61855]: DEBUG nova.compute.utils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. 
{{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1355.061828] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.235s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.064630] env[61855]: INFO nova.compute.claims [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1355.069390] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Build of instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 was re-scheduled: Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1355.070143] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1355.070418] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1355.070689] env[61855]: DEBUG nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1355.070986] env[61855]: DEBUG nova.network.neutron [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1355.203719] env[61855]: DEBUG neutronclient.v2_0.client [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61855) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1355.206615] env[61855]: ERROR nova.compute.manager [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = getattr(controller, method)(*args, **kwargs) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._get(image_id) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] resp, body = self.http_client.get(url, headers=header) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.request(url, 'GET', **kwargs) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._handle_response(resp) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exc.from_response(resp, resp.content) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During handling of the above exception, another exception occurred: [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self.driver.spawn(context, instance, image_meta, [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._fetch_image_if_missing(context, vi) [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1355.206615] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image_fetch(context, vi, tmp_image_ds_loc) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] images.fetch_image( [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] metadata = IMAGE_API.get(context, image_ref) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return session.show(context, image_id, [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] _reraise_translated_image_exception(image_id) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise new_exc.with_traceback(exc_trace) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 
9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = getattr(controller, method)(*args, **kwargs) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._get(image_id) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] resp, body = self.http_client.get(url, headers=header) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.request(url, 'GET', **kwargs) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self._handle_response(resp) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exc.from_response(resp, resp.content) [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] nova.exception.ImageNotAuthorized: Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. 
[ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During handling of the above exception, another exception occurred: [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._build_and_run_instance(context, instance, image, [ 1355.207601] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exception.RescheduledException( [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] nova.exception.RescheduledException: Build of instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 was re-scheduled: Not authorized for image c2cb7e7f-87ef-46fd-89b3-fd48992318fc. [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During handling of the above exception, another exception occurred: [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] exception_handler_v20(status_code, error_body) [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise client_exc(message=error_message, [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Neutron server returns request_ids: ['req-474ecbe3-eede-4e04-bc67-f02aef128f96'] [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 
9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During handling of the above exception, another exception occurred: [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._deallocate_network(context, instance, requested_networks) [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self.network_api.deallocate_for_instance( [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] data = neutron.list_ports(**search_opts) [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.list('ports', self.ports_path, retrieve_all, [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] for r in self._pagination(collection, path, **params): [ 1355.208583] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] res = self.get(path, params=params) [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 
9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.retry_request("GET", action, body=body, [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.do_request(method, action, body=body, [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._handle_fault_response(status_code, replybody, resp) [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exception.Unauthorized() [ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] nova.exception.Unauthorized: Not authorized. 
[ 1355.209524] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.258873] env[61855]: INFO nova.scheduler.client.report [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Deleted allocations for instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 [ 1355.285965] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0ec1dbe8-5641-4077-be85-d4c8f163fe3b tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 588.137s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.287565] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 391.075s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.287791] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Acquiring lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.288070] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.288317] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.291678] env[61855]: INFO nova.compute.manager [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Terminating instance [ 1355.295608] env[61855]: DEBUG nova.compute.manager [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1355.295809] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1355.296091] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-158649d9-0c74-4e88-912d-bac53f0ccb0c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.305669] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8755ba-64d5-41db-93f4-abc86b971a2d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.316142] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1355.339444] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955 could not be found. [ 1355.339662] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1355.339872] env[61855]: INFO nova.compute.manager [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1355.340161] env[61855]: DEBUG oslo.service.loopingcall [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1355.342872] env[61855]: DEBUG nova.compute.manager [-] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1355.343203] env[61855]: DEBUG nova.network.neutron [-] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1355.381734] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.445439] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977cbffe-6cfb-4cbb-ad17-17eaeedc1953 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.453998] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50593a1a-34d8-46ab-8828-9b13d0198d32 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.457024] env[61855]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61855) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1355.457292] env[61855]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1355.458553] env[61855]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-24fa28d3-ff31-483a-ac34-f1777a77e6e6'] [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1355.458553] env[61855]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1355.458553] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1355.459722] env[61855]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1355.459722] env[61855]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1355.459722] env[61855]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1355.459722] env[61855]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1355.459722] env[61855]: ERROR oslo.service.loopingcall [ 1355.459722] env[61855]: ERROR nova.compute.manager [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1355.491911] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d043e1-9690-427c-bfe6-c83397099a5b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.495666] env[61855]: ERROR nova.compute.manager [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] exception_handler_v20(status_code, error_body) [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise client_exc(message=error_message, [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Neutron server returns request_ids: ['req-24fa28d3-ff31-483a-ac34-f1777a77e6e6'] [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During handling of the above exception, another exception occurred: [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Traceback (most recent call last): [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._delete_instance(context, instance, bdms) [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._shutdown_instance(context, instance, bdms) [ 1355.495666] env[61855]: ERROR nova.compute.manager 
[instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._try_deallocate_network(context, instance, requested_networks) [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] with excutils.save_and_reraise_exception(): [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self.force_reraise() [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise self.value [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] _deallocate_network_with_retries() [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return evt.wait() [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1355.495666] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = hub.switch() [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.greenlet.switch() [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = func(*self.args, **self.kw) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] result = f(*args, **kwargs) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1355.496576] env[61855]: ERROR nova.compute.manager 
[instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._deallocate_network( [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self.network_api.deallocate_for_instance( [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] data = neutron.list_ports(**search_opts) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.list('ports', self.ports_path, retrieve_all, [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] for r in self._pagination(collection, path, **params): [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] res = self.get(path, params=params) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.retry_request("GET", action, body=body, [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] return self.do_request(method, action, body=body, [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] ret = obj(*args, **kwargs) [ 1355.496576] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1355.497620] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] self._handle_fault_response(status_code, replybody, resp) [ 1355.497620] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1355.497620] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1355.497620] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1355.497620] env[61855]: ERROR nova.compute.manager [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] [ 1355.503387] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdacf506-cf8f-438e-80ff-a6060eb8e45f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.516833] env[61855]: DEBUG nova.compute.provider_tree [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1355.527469] env[61855]: DEBUG nova.scheduler.client.report [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1355.531178] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.244s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.532495] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 
None None] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 222.249s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.532689] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] During sync_power_state the instance has a pending task (deleting). Skip. [ 1355.532864] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "9c90b7a3-1a7f-4ba6-8e49-fb0efd958955" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.542071] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.480s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.542529] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1355.544877] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.164s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.546273] env[61855]: INFO nova.compute.claims [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1355.592542] env[61855]: DEBUG nova.compute.utils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1355.593733] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Allocating IP information in the background. 
{{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1355.593911] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1355.603754] env[61855]: INFO nova.compute.manager [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] [instance: 9c90b7a3-1a7f-4ba6-8e49-fb0efd958955] Successfully reverted task state from None on failure for instance. [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server [None req-5d8dc8bd-a39f-44e1-afb1-338fe6854628 tempest-MigrationsAdminTest-1207065140 tempest-MigrationsAdminTest-1207065140-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-24fa28d3-ff31-483a-ac34-f1777a77e6e6'] [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1355.608515] env[61855]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server raise self.value [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server raise self.value [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1355.608515] env[61855]: ERROR oslo_messaging.rpc.server raise self.value [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1355.609851] env[61855]: ERROR 
oslo_messaging.rpc.server return f(*args, **kwargs) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server raise self.value [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server raise self.value [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1355.609851] env[61855]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1355.609851] env[61855]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
297, in do_request [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1355.611102] env[61855]: ERROR oslo_messaging.rpc.server [ 1355.612058] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1355.673077] env[61855]: DEBUG nova.policy [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5256e05619f744e988b78876f04b7286', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a6c123dab04b01868b291d2b953e75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1355.684012] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Start spawning the instance on the hypervisor. 
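
Editor's note: the traceback above shows the translation layer in nova/network/neutron.py at work. The decorated neutronclient call raises Unauthorized (HTTP 401), and the wrapper at line 212 re-raises it as NeutronAdminCredentialConfigurationInvalid so the failure reads as a deployment problem (bad service credentials) rather than a bad request. A minimal sketch of that pattern, with stand-in exception classes rather than Nova's real ones:

    import functools

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class named in the traceback."""

    def translate_neutron_errors(func):
        # Mirrors the `wrapper` frames that repeat above: every neutronclient
        # call is routed through one decorator.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                # A 401 on the service's own credentials means the configured
                # admin credentials are wrong, not that the request was invalid.
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper

    @translate_neutron_errors
    def list_ports(**search_opts):
        raise Unauthorized("The request you have made requires authentication.")

    # Calling list_ports() now raises the translated error with the original
    # Unauthorized attached as context ("During handling of the above
    # exception, another exception occurred"), the exact chained shape above.
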
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1355.712743] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1355.712983] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1355.713158] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1355.713344] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1355.713495] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1355.713645] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1355.713850] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1355.714026] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1355.714204] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 
tempest-ServersTestJSON-1669392497-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1355.714373] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1355.714549] env[61855]: DEBUG nova.virt.hardware [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1355.715698] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b55c13-c728-4ee7-b293-85078caed33b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.725644] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ba95b5-8798-45d2-909b-d048e7a0875f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.895932] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cdb887-0806-41e9-97e1-abfdd30704e6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.904589] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f8511b-cbb2-4fc9-9029-c822aedf1fdf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.933565] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fc5bd9-8128-4bd0-92a8-1f6e5917d5e9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.941088] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962582a1-3ffb-4d90-9b19-e9e4a463c6b6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.954231] env[61855]: DEBUG nova.compute.provider_tree [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1355.965300] env[61855]: DEBUG nova.scheduler.client.report [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1355.979774] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.435s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.980304] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1356.013301] env[61855]: DEBUG nova.compute.utils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1356.014740] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1356.014906] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1356.024196] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1356.102292] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Start spawning the instance on the hypervisor. 
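
Editor's note: the inventory record above maps to schedulable capacity via Placement's rule, capacity = (total - reserved) * allocation_ratio; max_unit only caps a single allocation (16 VCPUs per instance here). A quick check of the logged numbers:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # effective capacity per resource class, as Placement computes it
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 210}
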
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1356.116246] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Successfully created port: cc2a0206-5689-4fcc-8305-b1ba2d813149 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1356.133325] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1356.133573] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1356.133730] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.133915] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1356.134075] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.134224] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1356.134451] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1356.134629] 
env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1356.134796] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1356.134955] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1356.135143] env[61855]: DEBUG nova.virt.hardware [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1356.135990] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4637d3ce-ba68-4b30-a3dc-f84784dcf2a1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.147017] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802d06dd-7ab5-46dd-ad6c-1d0a27f6d634 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.330879] env[61855]: DEBUG nova.policy [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6365552b80dc401d8a166f179d231b10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ec8ab8fc8404ec8a37780aa6e6fd40e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1356.915616] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Successfully created port: 96fd5ce3-b8ab-463a-abbc-ea5615156890 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1357.331391] env[61855]: DEBUG nova.compute.manager [req-ca01c1be-65ae-421a-9052-9c226436c668 req-2607b1db-45f9-40d7-bf37-652b70650848 service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Received event network-vif-plugged-cc2a0206-5689-4fcc-8305-b1ba2d813149 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1357.331627] env[61855]: DEBUG oslo_concurrency.lockutils [req-ca01c1be-65ae-421a-9052-9c226436c668 req-2607b1db-45f9-40d7-bf37-652b70650848 service nova] Acquiring lock 
"cf66f181-60e6-43d4-a561-a32e9174448d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.331838] env[61855]: DEBUG oslo_concurrency.lockutils [req-ca01c1be-65ae-421a-9052-9c226436c668 req-2607b1db-45f9-40d7-bf37-652b70650848 service nova] Lock "cf66f181-60e6-43d4-a561-a32e9174448d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.332035] env[61855]: DEBUG oslo_concurrency.lockutils [req-ca01c1be-65ae-421a-9052-9c226436c668 req-2607b1db-45f9-40d7-bf37-652b70650848 service nova] Lock "cf66f181-60e6-43d4-a561-a32e9174448d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.332211] env[61855]: DEBUG nova.compute.manager [req-ca01c1be-65ae-421a-9052-9c226436c668 req-2607b1db-45f9-40d7-bf37-652b70650848 service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] No waiting events found dispatching network-vif-plugged-cc2a0206-5689-4fcc-8305-b1ba2d813149 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1357.332380] env[61855]: WARNING nova.compute.manager [req-ca01c1be-65ae-421a-9052-9c226436c668 req-2607b1db-45f9-40d7-bf37-652b70650848 service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Received unexpected event network-vif-plugged-cc2a0206-5689-4fcc-8305-b1ba2d813149 for instance with vm_state building and task_state spawning. [ 1357.503704] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Successfully updated port: cc2a0206-5689-4fcc-8305-b1ba2d813149 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1357.527669] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "refresh_cache-cf66f181-60e6-43d4-a561-a32e9174448d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.527824] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "refresh_cache-cf66f181-60e6-43d4-a561-a32e9174448d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.527978] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1357.579008] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1357.941562] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Updating instance_info_cache with network_info: [{"id": "cc2a0206-5689-4fcc-8305-b1ba2d813149", "address": "fa:16:3e:23:fe:03", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc2a0206-56", "ovs_interfaceid": "cc2a0206-5689-4fcc-8305-b1ba2d813149", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.961931] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "refresh_cache-cf66f181-60e6-43d4-a561-a32e9174448d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.962272] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Instance network_info: |[{"id": "cc2a0206-5689-4fcc-8305-b1ba2d813149", "address": "fa:16:3e:23:fe:03", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc2a0206-56", "ovs_interfaceid": "cc2a0206-5689-4fcc-8305-b1ba2d813149", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1357.962767] env[61855]: 
DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:fe:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f925dc8-2145-457e-a4d4-c07117356dd0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc2a0206-5689-4fcc-8305-b1ba2d813149', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1357.971692] env[61855]: DEBUG oslo.service.loopingcall [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1357.972312] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1357.972945] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dae11aeb-73f9-41e5-9202-bac0b9591ccd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.995654] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1357.995654] env[61855]: value = "task-4302909" [ 1357.995654] env[61855]: _type = "Task" [ 1357.995654] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.004783] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302909, 'name': CreateVM_Task} progress is 0%. 
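
Editor's note: CreateVM_Task (task-4302909) is asynchronous on the vCenter side; the "Waiting for the task" / "progress is 0%" records come from a loop that re-reads task state until it is terminal. A generic sketch of such a poll loop (names are illustrative, not the oslo.vmware API):

    import time

    def wait_for_task(poll, interval=0.5, timeout=60):
        # `poll` returns a dict such as {'state': 'running', 'progress': 0};
        # loop until the task reports success or error, or we time out.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print(f"Task progress is {info.get('progress', 0)}%.")
            time.sleep(interval)
        raise TimeoutError('task did not complete in time')

    # e.g. wait_for_task(lambda: {'state': 'success'}) returns immediately
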
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.082493] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Successfully updated port: 96fd5ce3-b8ab-463a-abbc-ea5615156890 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1358.093393] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "refresh_cache-242e1a24-3f5b-4509-8677-e5a4c7883605" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.093556] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "refresh_cache-242e1a24-3f5b-4509-8677-e5a4c7883605" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.093711] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1358.136146] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1358.506212] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302909, 'name': CreateVM_Task, 'duration_secs': 0.296688} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.506424] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1358.507111] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.507324] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.507657] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1358.507909] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bc6b826-d9bc-4ced-a1ba-4f64af6d6494 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.512291] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1358.512291] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b2732c-a7c1-836a-84aa-21eb38a0a3e1" [ 1358.512291] env[61855]: _type = "Task" [ 1358.512291] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.519629] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b2732c-a7c1-836a-84aa-21eb38a0a3e1, 'name': SearchDatastore_Task} progress is 0%. 
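
Editor's note: the "[datastore2] devstack-image-cache_base/<image-uuid>" lock names above serialize work on one cached base image per datastore, so the two builds in this section (cf66f181 and 242e1a24, both using image c2cb7e7f) cannot both fetch the same base VMDK. A sketch of that guard, assuming oslo.concurrency is available; the cache itself is a stand-in:

    from oslo_concurrency import lockutils

    _cache = set()   # stand-in for the datastore's image-cache directory

    def ensure_base_image(datastore, image_id, fetch):
        # lock name mirrors the log's "[datastore2] devstack-image-cache_base/<id>"
        name = f"[{datastore}] devstack-image-cache_base/{image_id}"
        with lockutils.lock(name):
            if (datastore, image_id) not in _cache:
                fetch(datastore, image_id)        # only the first holder fetches
                _cache.add((datastore, image_id))
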
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.588192] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Updating instance_info_cache with network_info: [{"id": "96fd5ce3-b8ab-463a-abbc-ea5615156890", "address": "fa:16:3e:1b:83:66", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fd5ce3-b8", "ovs_interfaceid": "96fd5ce3-b8ab-463a-abbc-ea5615156890", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.604016] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "refresh_cache-242e1a24-3f5b-4509-8677-e5a4c7883605" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.604016] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Instance network_info: |[{"id": "96fd5ce3-b8ab-463a-abbc-ea5615156890", "address": "fa:16:3e:1b:83:66", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fd5ce3-b8", "ovs_interfaceid": "96fd5ce3-b8ab-463a-abbc-ea5615156890", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1358.604016] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:83:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96fd5ce3-b8ab-463a-abbc-ea5615156890', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1358.608964] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating folder: Project (8ec8ab8fc8404ec8a37780aa6e6fd40e). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1358.609543] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b690cebe-3e98-43aa-bccb-8364db4141be {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.620238] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created folder: Project (8ec8ab8fc8404ec8a37780aa6e6fd40e) in parent group-v847048. [ 1358.621153] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating folder: Instances. Parent ref: group-v847129. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1358.621153] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aaa4a7e-fb10-4221-855d-40919c555ba3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.630179] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created folder: Instances in parent group-v847129. [ 1358.630390] env[61855]: DEBUG oslo.service.loopingcall [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1358.630573] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1358.630774] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ba9399d-f1d6-4ead-b902-820d68294f7b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.649451] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1358.649451] env[61855]: value = "task-4302912" [ 1358.649451] env[61855]: _type = "Task" [ 1358.649451] env[61855]: } to complete. 
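
Editor's note: the folder records above build a per-tenant hierarchy in two steps (create "Project (<tenant>)" under group-v847048, then "Instances" under the new group-v847129), tolerating folders that already exist. The same idempotent walk in miniature, with a dict standing in for vCenter's folder tree:

    def ensure_folder_path(tree, path):
        # walk/create each folder level; setdefault makes the create idempotent
        node = tree
        for name in path:
            node = node.setdefault(name, {})
        return node

    vc = {}
    project = 'Project (8ec8ab8fc8404ec8a37780aa6e6fd40e)'
    ensure_folder_path(vc, [project, 'Instances'])
    ensure_folder_path(vc, [project, 'Instances'])  # second call is a no-op
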
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.658225] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302912, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.682842] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.022673] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.023112] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1359.023214] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.159917] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302912, 'name': CreateVM_Task, 'duration_secs': 0.305847} completed successfully.
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.160202] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1359.160949] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.161155] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.161474] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1359.161733] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4413e4b-b86b-4757-a79a-53c367d77adf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.166249] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 1359.166249] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524fa4e7-485d-b7d0-e378-46a9a207859e" [ 1359.166249] env[61855]: _type = "Task" [ 1359.166249] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.173834] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524fa4e7-485d-b7d0-e378-46a9a207859e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.363179] env[61855]: DEBUG nova.compute.manager [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Received event network-changed-cc2a0206-5689-4fcc-8305-b1ba2d813149 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1359.363348] env[61855]: DEBUG nova.compute.manager [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Refreshing instance network info cache due to event network-changed-cc2a0206-5689-4fcc-8305-b1ba2d813149. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1359.363568] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Acquiring lock "refresh_cache-cf66f181-60e6-43d4-a561-a32e9174448d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.363749] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Acquired lock "refresh_cache-cf66f181-60e6-43d4-a561-a32e9174448d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.363921] env[61855]: DEBUG nova.network.neutron [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Refreshing network info cache for port cc2a0206-5689-4fcc-8305-b1ba2d813149 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1359.661161] env[61855]: DEBUG nova.network.neutron [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Updated VIF entry in instance network info cache for port cc2a0206-5689-4fcc-8305-b1ba2d813149. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1359.661526] env[61855]: DEBUG nova.network.neutron [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Updating instance_info_cache with network_info: [{"id": "cc2a0206-5689-4fcc-8305-b1ba2d813149", "address": "fa:16:3e:23:fe:03", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc2a0206-56", "ovs_interfaceid": "cc2a0206-5689-4fcc-8305-b1ba2d813149", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.673077] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Releasing lock "refresh_cache-cf66f181-60e6-43d4-a561-a32e9174448d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.673465] env[61855]: DEBUG nova.compute.manager [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Received event 
network-vif-plugged-96fd5ce3-b8ab-463a-abbc-ea5615156890 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1359.673681] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Acquiring lock "242e1a24-3f5b-4509-8677-e5a4c7883605-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.673895] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.674077] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.674250] env[61855]: DEBUG nova.compute.manager [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] No waiting events found dispatching network-vif-plugged-96fd5ce3-b8ab-463a-abbc-ea5615156890 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1359.674422] env[61855]: WARNING nova.compute.manager [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Received unexpected event network-vif-plugged-96fd5ce3-b8ab-463a-abbc-ea5615156890 for instance with vm_state building and task_state spawning. [ 1359.674591] env[61855]: DEBUG nova.compute.manager [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Received event network-changed-96fd5ce3-b8ab-463a-abbc-ea5615156890 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1359.674749] env[61855]: DEBUG nova.compute.manager [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Refreshing instance network info cache due to event network-changed-96fd5ce3-b8ab-463a-abbc-ea5615156890.
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1359.674931] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Acquiring lock "refresh_cache-242e1a24-3f5b-4509-8677-e5a4c7883605" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.675117] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Acquired lock "refresh_cache-242e1a24-3f5b-4509-8677-e5a4c7883605" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.675305] env[61855]: DEBUG nova.network.neutron [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Refreshing network info cache for port 96fd5ce3-b8ab-463a-abbc-ea5615156890 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1359.680213] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.680439] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1359.680646] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.950358] env[61855]: DEBUG nova.network.neutron [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Updated VIF entry in instance network info cache for port 96fd5ce3-b8ab-463a-abbc-ea5615156890. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1359.950711] env[61855]: DEBUG nova.network.neutron [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Updating instance_info_cache with network_info: [{"id": "96fd5ce3-b8ab-463a-abbc-ea5615156890", "address": "fa:16:3e:1b:83:66", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fd5ce3-b8", "ovs_interfaceid": "96fd5ce3-b8ab-463a-abbc-ea5615156890", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.960431] env[61855]: DEBUG oslo_concurrency.lockutils [req-fe7ba483-d5db-4cde-b926-a45805c0b2c1 req-0668fcea-81e3-4b82-8a01-37ad7e3f84fc service nova] Releasing lock "refresh_cache-242e1a24-3f5b-4509-8677-e5a4c7883605" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.919266] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1366.923894] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.924176] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.924447] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1367.924483] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1367.954145] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954145] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954145] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954145] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954145] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954145] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954145] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954439] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954439] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954501] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1367.954640] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
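
The block above is one complete pass of the ComputeManager._heal_instance_info_cache periodic task: the task rebuilds the per-host instance list, skips every instance still in the Building state (all of them here), and, finding nothing healable, logs "Didn't find any instances for network info cache update." A minimal Python sketch of that skip loop, paraphrased from these records only; the dict layout, function name and print calls are illustrative, not Nova's actual code:

    # Simplified paraphrase of the skip logic traced by the DEBUG records
    # above; the real implementation is
    # nova.compute.manager.ComputeManager._heal_instance_info_cache.
    def pick_instance_to_heal(instances):
        candidates = []
        for inst in instances:
            if inst['vm_state'] == 'building':
                # Matches "Skipping network cache update for instance
                # because it is Building." in the log.
                print(f"[instance: {inst['uuid']}] Skipping network cache "
                      f"update for instance because it is Building.")
                continue
            candidates.append(inst)
        if not candidates:
            print("Didn't find any instances for network info cache update.")
            return None
        # The periodic task heals at most one instance per pass.
        return candidates[0]
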
[ 1367.955078] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.955268] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.955400] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1368.924422] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1368.952936] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.923979] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.936303] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.936571] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.936702] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.937232] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1369.938571] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7268255c-9c0d-4a05-a346-ba977b5db502 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.947659] env[61855]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80de7a2a-b009-4dd5-803d-fdea248de715 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.962882] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4468541-5e69-4b27-8602-75003abf09df {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.969287] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684ebf18-918d-4198-a45f-fa39faeca635 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.997919] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180682MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1369.998077] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.998268] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.086191] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.086361] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.086493] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.086617] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.086738] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.086861] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.086981] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.087115] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.087235] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.087406] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1370.105620] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6a976d89-a637-4bcc-83f3-fd509b5bad0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.117457] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance b5ad73e6-2c48-41c4-85f9-3b9f74afb983 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.129713] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.144734] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.156096] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.167802] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 07bb6cf5-b125-4e08-a7fc-9b1c358d7e17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.178330] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 4bf2bcb4-fc3c-4051-9e38-128d65bd1c82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.191184] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2b747db5-efe7-431d-8d6c-58fa54e2c111 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.202665] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 13d55fae-9923-4451-961a-f75ef39f7548 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.215016] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.224789] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 15d55991-fc11-45a0-ac77-1c00ce55dce7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.236195] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 15388773-61c4-4ca4-9f61-aec7c22f4081 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1370.237028] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1370.237028] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1370.542850] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a77139-6e0d-45b6-8a9f-88e18ccd668b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.551774] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c1d1c4-99b7-4e4c-bc92-ae0aabe098ef {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.586781] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6b91eb-1681-4a96-b2f8-64cd75073291 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.594810] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def9761c-ab1c-4c54-b427-665420c8350b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.607793] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
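
The "Final resource view" record above is internally consistent with the ten active 1-vCPU / 128 MB / 1 GB allocations listed earlier in this audit: used_ram = 512 MB reserved + 10 x 128 MB = 1792 MB, used_disk = 10 x 1 GB = 10 GB, used_vcpus = 10 x 1 = 10. The inventory record that follows reports per-resource-class capacity, which placement turns into effective capacity as (total - reserved) * allocation_ratio. A small sketch of that arithmetic using the exact values from these records (the helper name is ours, not a placement API):

    # Effective capacity as placement derives it from an inventory record.
    def effective_capacity(total, reserved, allocation_ratio):
        return (total - reserved) * allocation_ratio

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, effective_capacity(inv['total'], inv['reserved'],
                                     inv['allocation_ratio']))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 210.0
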
[ 1370.615902] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1370.631248] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1370.631438] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.633s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.632443] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.924679] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.990247] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "accbccfe-3858-4a4c-b47b-3f12976c8c20" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.538412] env[61855]: DEBUG oslo_concurrency.lockutils [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "cf66f181-60e6-43d4-a561-a32e9174448d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.288566] env[61855]: DEBUG oslo_concurrency.lockutils [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "242e1a24-3f5b-4509-8677-e5a4c7883605" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.031066] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424
tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.031435] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.579898] env[61855]: WARNING oslo_vmware.rw_handles [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1402.579898] env[61855]: ERROR oslo_vmware.rw_handles [ 1402.580605] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1402.582202] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1402.582459] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/c2e51ea1-3955-4c1a-a28a-c6d2ba06200e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1402.582753] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a59dfbd-63ce-46bf-964e-8fdb329b6c70 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.590076] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Waiting for the task: (returnval){ [ 1402.590076] env[61855]: value = "task-4302913" [ 1402.590076] env[61855]: _type = "Task" [ 1402.590076] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.597603] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Task: {'id': task-4302913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.102038] env[61855]: DEBUG oslo_vmware.exceptions [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1403.102181] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.102663] env[61855]: ERROR nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1403.102663] env[61855]: Faults: ['InvalidArgument'] [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Traceback (most recent call last): [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] yield resources [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self.driver.spawn(context, instance, image_meta, [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] 
File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self._fetch_image_if_missing(context, vi) [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] image_cache(vi, tmp_image_ds_loc) [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] vm_util.copy_virtual_disk( [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] session._wait_for_task(vmdk_copy_task) [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] return self.wait_for_task(task_ref) [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] return evt.wait() [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] result = hub.switch() [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] return self.greenlet.switch() [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self.f(*self.args, **self.kw) [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] raise 
exceptions.translate_fault(task_info.error) [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Faults: ['InvalidArgument'] [ 1403.102663] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] [ 1403.103737] env[61855]: INFO nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Terminating instance [ 1403.104511] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.104719] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1403.105340] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1403.105528] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1403.105770] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-faed2896-886e-4952-b67d-634f73694223 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.108014] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad065f7-151c-4545-81fb-29da369b8c15 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.114541] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1403.114746] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f715ec03-c200-44b2-93f2-8ce0bb7e3754 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.116871] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1403.117060] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1403.118038] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-267f356f-90d5-48a0-a808-5c1a069010b2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.122458] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Waiting for the task: (returnval){ [ 1403.122458] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]528860ac-4303-40dc-98f3-6c70e9f69c8f" [ 1403.122458] env[61855]: _type = "Task" [ 1403.122458] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.129316] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]528860ac-4303-40dc-98f3-6c70e9f69c8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.181203] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1403.181390] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1403.182060] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Deleting the datastore file [datastore2] bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.182060] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-347cacb2-abb2-4ad3-ada9-02f31edac442 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.187581] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Waiting for the task: (returnval){ [ 1403.187581] env[61855]: value = "task-4302915" [ 1403.187581] env[61855]: _type = "Task" [ 1403.187581] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.195325] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Task: {'id': task-4302915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.633347] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1403.633696] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Creating directory with path [datastore2] vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1403.633827] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a18ee694-3b2f-4090-bf4f-5240ada9f2ee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.645137] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Created directory with path [datastore2] vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1403.645307] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Fetch image to [datastore2] vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1403.645450] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1403.646206] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7d4297-4ed9-4d52-bec6-195e479ae7a7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.652321] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb940caf-6d71-462e-a919-ffdb2543bd96 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.661032] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a67d062-968c-4d97-bf45-a0453b51f7ee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.693189] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-479bfa2a-0390-48d9-9a49-c58611bdcbb9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.699700] env[61855]: DEBUG oslo_vmware.api [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Task: {'id': task-4302915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06428} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.701093] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1403.701285] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1403.701461] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1403.701636] env[61855]: INFO nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Took 0.60 seconds to destroy the instance on the hypervisor. 
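
At this point the failed spawn of bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e has been fully unwound on the hypervisor (VM unregistered, datastore files deleted via task-4302915 in 0.06428s, instance destroyed in 0.60 seconds); the records that follow abort its resource claim and re-raise the same VimFaultException as "Failed to build and run instance". When working with wrapped chunks like these, record boundaries can be recovered from the uniform prefix "[ <uptime>] env[<n>]: LEVEL logger ...". A stdlib-only sketch; the regex is fitted to the lines visible in this file, not an official oslo.log format spec, and the file path is illustrative:

    import re

    # One match per record start, e.g. "[ 1403.103737] env[61855]: INFO nova..."
    RECORD = re.compile(
        r"\[\s*(?P<uptime>\d+\.\d+)\] env\[\d+\]: "
        r"(?P<level>DEBUG|INFO|WARNING|ERROR) (?P<logger>[\w.]+)")

    def iter_records(chunk):
        """Split a wrapped log chunk into (uptime, level, logger, body)."""
        starts = list(RECORD.finditer(chunk))
        for m, nxt in zip(starts, starts[1:] + [None]):
            end = nxt.start() if nxt else len(chunk)
            yield (float(m['uptime']), m['level'], m['logger'],
                   chunk[m.end():end].strip())

    # Example: surface the spawn failure quickly by listing ERROR records.
    if __name__ == '__main__':
        chunk = open('nova-compute.log').read()  # illustrative path
        for rec in iter_records(chunk):
            if rec[1] == 'ERROR':
                print(rec[0], rec[2])
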
[ 1403.703404] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-991f093e-b9f5-448a-8540-a788319d1df0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.705269] env[61855]: DEBUG nova.compute.claims [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1403.705441] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.705677] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.728055] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1403.782182] env[61855]: DEBUG oslo_vmware.rw_handles [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1403.839243] env[61855]: DEBUG oslo_vmware.rw_handles [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1403.839426] env[61855]: DEBUG oslo_vmware.rw_handles [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1404.032541] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628be7bd-761b-4eb7-8888-bafa7845ee91 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.039927] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9177d994-1098-4c1c-b63c-3a7fdc135437 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.069683] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c79351f-9e08-4f5d-a6b7-e98974fb21fd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.076547] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98309b40-0f4a-4415-a717-da5d62725d08 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.089476] env[61855]: DEBUG nova.compute.provider_tree [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1404.098868] env[61855]: DEBUG nova.scheduler.client.report [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1404.117498] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.412s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.118078] env[61855]: ERROR nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1404.118078] env[61855]: Faults: ['InvalidArgument'] [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Traceback (most recent call last): [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1404.118078] 
env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self.driver.spawn(context, instance, image_meta, [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self._fetch_image_if_missing(context, vi) [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] image_cache(vi, tmp_image_ds_loc) [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] vm_util.copy_virtual_disk( [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] session._wait_for_task(vmdk_copy_task) [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] return self.wait_for_task(task_ref) [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] return evt.wait() [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] result = hub.switch() [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] return self.greenlet.switch() [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] self.f(*self.args, **self.kw) [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] raise exceptions.translate_fault(task_info.error) [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Faults: ['InvalidArgument'] [ 1404.118078] env[61855]: ERROR nova.compute.manager [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] [ 1404.119107] env[61855]: DEBUG nova.compute.utils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1404.120266] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Build of instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e was re-scheduled: A specified parameter was not correct: fileType [ 1404.120266] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1404.120638] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1404.120814] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1404.120987] env[61855]: DEBUG nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1404.121167] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1404.496806] env[61855]: DEBUG nova.network.neutron [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.510529] env[61855]: INFO nova.compute.manager [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Took 0.39 seconds to deallocate network for instance. [ 1404.607963] env[61855]: INFO nova.scheduler.client.report [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Deleted allocations for instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e [ 1404.628448] env[61855]: DEBUG oslo_concurrency.lockutils [None req-96a7f30b-0e9c-41a1-b97b-0aada78d84f0 tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 595.175s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.629755] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 398.314s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.629979] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Acquiring lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.630451] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.630451] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.632810] env[61855]: INFO nova.compute.manager [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Terminating instance [ 1404.634492] env[61855]: DEBUG nova.compute.manager [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1404.634752] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1404.635406] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86d3fb5e-0dcd-4744-aba3-351b4634b2ce {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.641067] env[61855]: DEBUG nova.compute.manager [None req-76c349b8-60a2-4043-bda2-83ca782c628f tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] [instance: 6a976d89-a637-4bcc-83f3-fd509b5bad0d] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1404.647930] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b40ce4-4dfb-4ffd-929d-58ade5e6cf1f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.664694] env[61855]: DEBUG nova.compute.manager [None req-76c349b8-60a2-4043-bda2-83ca782c628f tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] [instance: 6a976d89-a637-4bcc-83f3-fd509b5bad0d] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1404.677288] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e could not be found. 
[ 1404.677488] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1404.677741] env[61855]: INFO nova.compute.manager [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1404.677946] env[61855]: DEBUG oslo.service.loopingcall [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.679974] env[61855]: DEBUG nova.compute.manager [-] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1404.680094] env[61855]: DEBUG nova.network.neutron [-] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1404.689744] env[61855]: DEBUG oslo_concurrency.lockutils [None req-76c349b8-60a2-4043-bda2-83ca782c628f tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] Lock "6a976d89-a637-4bcc-83f3-fd509b5bad0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.086s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.699478] env[61855]: DEBUG nova.compute.manager [None req-bac72562-6f06-4e67-90c9-d8ab97481937 tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] [instance: b5ad73e6-2c48-41c4-85f9-3b9f74afb983] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1404.703066] env[61855]: DEBUG nova.network.neutron [-] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.711210] env[61855]: INFO nova.compute.manager [-] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] Took 0.03 seconds to deallocate network for instance. [ 1404.726777] env[61855]: DEBUG nova.compute.manager [None req-bac72562-6f06-4e67-90c9-d8ab97481937 tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] [instance: b5ad73e6-2c48-41c4-85f9-3b9f74afb983] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1404.746601] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bac72562-6f06-4e67-90c9-d8ab97481937 tempest-ServerShowV247Test-598026718 tempest-ServerShowV247Test-598026718-project-member] Lock "b5ad73e6-2c48-41c4-85f9-3b9f74afb983" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.957s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.758725] env[61855]: DEBUG nova.compute.manager [None req-1872625f-a5f8-4362-b063-121c1eff9ce3 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1404.788118] env[61855]: DEBUG nova.compute.manager [None req-1872625f-a5f8-4362-b063-121c1eff9ce3 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1404.819300] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d98c5380-5bdd-4d5c-916b-32505d55887b tempest-AttachInterfacesV270Test-997216403 tempest-AttachInterfacesV270Test-997216403-project-member] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.189s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.821267] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1872625f-a5f8-4362-b063-121c1eff9ce3 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "c1bcd1c2-df8c-40fb-a15f-e80a0698bdb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.732s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.821735] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 271.538s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.821920] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e] During sync_power_state the instance has a pending task (deleting). Skip. 
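Note on the scheduler report entries in this log: they repeat the provider inventory unchanged. The schedulable capacity those numbers imply follows Placement's usual capacity formula, capacity = (total - reserved) * allocation_ratio; the worked example below uses the exact values from the logged inventory and is illustrative only.

    # Inventory values copied from the report lines in this log; the formula
    # is Placement's standard capacity calculation, shown for illustration.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity = {capacity:g}")

    # Prints VCPU: 192 (48 host CPUs oversubscribed 4x), MEMORY_MB: 196078,
    # DISK_GB: 210 -- ample room for the 128 MB / 1 GB m1.nano claims that
    # succeed immediately later in this log.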
[ 1404.822101] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "bd1bdc4a-33cf-4c92-aa67-ee85c0b3900e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.832808] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1404.883736] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.883736] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.883736] env[61855]: INFO nova.compute.claims [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1405.133139] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb880d9-bd06-4d77-8815-97fd9a1c9e52 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.141177] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049cc904-d31a-410a-82f8-8f9c5a8e1c28 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.171872] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5a86fd-f3db-4f54-8ae5-0772789e386d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.179215] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085fe6ff-62a8-4f42-9625-5d8541f31dae {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.192575] env[61855]: DEBUG nova.compute.provider_tree [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1405.201391] env[61855]: DEBUG nova.scheduler.client.report [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 
tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1405.219601] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.220121] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1405.253714] env[61855]: DEBUG nova.compute.utils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1405.255250] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1405.255437] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1405.292617] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Start building block device mappings for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1405.365319] env[61855]: DEBUG nova.policy [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88a9ecbe170e4082839598df9a68bad5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c1e133fd8324c52aad68b8396ae1ae4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1405.370273] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1405.395257] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1405.395506] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1405.395694] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1405.395848] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1405.395997] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1405.396163] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1405.396374] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1405.396535] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1405.396704] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1405.396867] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1405.397363] env[61855]: DEBUG nova.virt.hardware [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1405.397941] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd6ac8f-555a-4cda-bd06-6833e09cb475 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.406152] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e191da-f4c3-4c42-92b5-23d83aed3d55 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.745618] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Successfully created port: 17012166-2237-49b4-b9c9-48251fb26140 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1405.966891] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.460641] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Successfully updated port: 17012166-2237-49b4-b9c9-48251fb26140 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1406.473224] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.473948] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquired lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.474182] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1406.527492] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1406.534288] env[61855]: DEBUG nova.compute.manager [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Received event network-vif-plugged-17012166-2237-49b4-b9c9-48251fb26140 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1406.534737] env[61855]: DEBUG oslo_concurrency.lockutils [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] Acquiring lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.535066] env[61855]: DEBUG oslo_concurrency.lockutils [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.535293] env[61855]: DEBUG oslo_concurrency.lockutils [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.535499] env[61855]: DEBUG nova.compute.manager [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] No waiting events found dispatching network-vif-plugged-17012166-2237-49b4-b9c9-48251fb26140 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1406.535703] env[61855]: WARNING nova.compute.manager [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Received unexpected event network-vif-plugged-17012166-2237-49b4-b9c9-48251fb26140 for instance with vm_state building and task_state deleting. [ 1406.535925] env[61855]: DEBUG nova.compute.manager [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Received event network-changed-17012166-2237-49b4-b9c9-48251fb26140 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1406.536160] env[61855]: DEBUG nova.compute.manager [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Refreshing instance network info cache due to event network-changed-17012166-2237-49b4-b9c9-48251fb26140. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1406.536335] env[61855]: DEBUG oslo_concurrency.lockutils [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] Acquiring lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.742579] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Updating instance_info_cache with network_info: [{"id": "17012166-2237-49b4-b9c9-48251fb26140", "address": "fa:16:3e:5f:53:7f", "network": {"id": "4a85fb43-2ffa-4e10-97b8-7d9615355b9f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1273533208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c1e133fd8324c52aad68b8396ae1ae4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17012166-22", "ovs_interfaceid": "17012166-2237-49b4-b9c9-48251fb26140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.753516] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Releasing lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.753844] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance network_info: |[{"id": "17012166-2237-49b4-b9c9-48251fb26140", "address": "fa:16:3e:5f:53:7f", "network": {"id": "4a85fb43-2ffa-4e10-97b8-7d9615355b9f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1273533208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c1e133fd8324c52aad68b8396ae1ae4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": 
"nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17012166-22", "ovs_interfaceid": "17012166-2237-49b4-b9c9-48251fb26140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1406.754170] env[61855]: DEBUG oslo_concurrency.lockutils [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] Acquired lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.754351] env[61855]: DEBUG nova.network.neutron [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Refreshing network info cache for port 17012166-2237-49b4-b9c9-48251fb26140 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1406.755462] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:53:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17012166-2237-49b4-b9c9-48251fb26140', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1406.763163] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Creating folder: Project (3c1e133fd8324c52aad68b8396ae1ae4). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1406.764009] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-044c05bb-289c-45be-8314-58bd50d049a8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.776600] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Created folder: Project (3c1e133fd8324c52aad68b8396ae1ae4) in parent group-v847048. [ 1406.776792] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Creating folder: Instances. Parent ref: group-v847132. 
{{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1406.777021] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b402add2-bb80-49eb-9deb-44a72e899d11 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.785282] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Created folder: Instances in parent group-v847132. [ 1406.785506] env[61855]: DEBUG oslo.service.loopingcall [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1406.785683] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1406.785873] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea80a96e-6b31-4ebb-8d7c-90dc756cad10 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.806819] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1406.806819] env[61855]: value = "task-4302918" [ 1406.806819] env[61855]: _type = "Task" [ 1406.806819] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.814205] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302918, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.010699] env[61855]: DEBUG nova.network.neutron [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Updated VIF entry in instance network info cache for port 17012166-2237-49b4-b9c9-48251fb26140. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1407.011080] env[61855]: DEBUG nova.network.neutron [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Updating instance_info_cache with network_info: [{"id": "17012166-2237-49b4-b9c9-48251fb26140", "address": "fa:16:3e:5f:53:7f", "network": {"id": "4a85fb43-2ffa-4e10-97b8-7d9615355b9f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1273533208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c1e133fd8324c52aad68b8396ae1ae4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17012166-22", "ovs_interfaceid": "17012166-2237-49b4-b9c9-48251fb26140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.020510] env[61855]: DEBUG oslo_concurrency.lockutils [req-31bf6147-02ec-4b5f-8ee1-12780452f3d1 req-de149404-b3df-4cbd-9e4a-8d474c834f03 service nova] Releasing lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.316956] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302918, 'name': CreateVM_Task, 'duration_secs': 0.283808} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.319044] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1407.319044] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.319044] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.319044] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1407.319044] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aa9d694-7d6a-4978-9947-5ddf1c2b5924 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.323190] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Waiting for the task: (returnval){ [ 1407.323190] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5285f929-dcfb-04b4-7ad4-df905d5f5c60" [ 1407.323190] env[61855]: _type = "Task" [ 1407.323190] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.330431] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5285f929-dcfb-04b4-7ad4-df905d5f5c60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.833669] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.833932] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1407.834153] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.924631] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.924913] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61855) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1427.932840] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.933225] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.933265] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1427.933397] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1427.955775] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.955925] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956066] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956198] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956322] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956445] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956567] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956722] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956902] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.956984] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1427.957097] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1427.957585] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.957770] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.957906] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1428.924903] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1428.925180] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.925533] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.934114] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.948312] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.948555] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.948694] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.948848] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1430.949984] env[61855]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04190b3b-3069-49dc-a744-d25ef2c0d1fb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.958899] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44825ae-a41a-4fd0-92e8-0e9dd332ca3c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.972794] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433ebf17-f1fa-4601-bb9c-94a0e2a81e13 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.979050] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272eb128-c555-4125-8561-57aa4d46faf5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.008050] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180683MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1431.008160] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.008312] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.147208] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.147380] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.147512] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.147636] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.147757] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.147874] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.147991] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.148180] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.148316] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.148437] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.160829] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 2b747db5-efe7-431d-8d6c-58fa54e2c111 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.172282] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 13d55fae-9923-4451-961a-f75ef39f7548 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.182696] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.192925] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 15d55991-fc11-45a0-ac77-1c00ce55dce7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.203298] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 15388773-61c4-4ca4-9f61-aec7c22f4081 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.214828] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.215085] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1431.215311] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1431.403409] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206233fe-7151-4c17-8798-e7a99944b8ed {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.411101] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a89e50-a76c-4257-90cd-620d3d0c698a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.440407] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88ce824-b9f7-4cec-8a16-7278abbecfc6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.447232] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0225a908-5979-4c5e-ae55-3754515455c9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.459790] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.467763] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1431.482639] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1431.482822] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.475s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.924732] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.925064] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1431.935432] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] There are 0 instances to clean {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1432.936058] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.925349] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.251988] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "8653615e-3254-436e-984d-e52fdfb86ce4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.252363] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "8653615e-3254-436e-984d-e52fdfb86ce4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.891420] env[61855]: WARNING oslo_vmware.rw_handles [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed
connection without" [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1449.891420] env[61855]: ERROR oslo_vmware.rw_handles [ 1449.891979] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1449.893914] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1449.894245] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Copying Virtual Disk [datastore2] vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/d5255343-a476-4929-ae53-f72b775abf59/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1449.894600] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2077953-01c9-4229-a2d9-096415bfe61b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.902148] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Waiting for the task: (returnval){ [ 1449.902148] env[61855]: value = "task-4302919" [ 1449.902148] env[61855]: _type = "Task" [ 1449.902148] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.909981] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Task: {'id': task-4302919, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.412490] env[61855]: DEBUG oslo_vmware.exceptions [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1450.412788] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.413338] env[61855]: ERROR nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1450.413338] env[61855]: Faults: ['InvalidArgument'] [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Traceback (most recent call last): [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] yield resources [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self.driver.spawn(context, instance, image_meta, [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self._fetch_image_if_missing(context, vi) [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] image_cache(vi, tmp_image_ds_loc) [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] vm_util.copy_virtual_disk( [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] session._wait_for_task(vmdk_copy_task) [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] return self.wait_for_task(task_ref) [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] return evt.wait() [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] result = hub.switch() [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] return self.greenlet.switch() [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self.f(*self.args, **self.kw) [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] raise exceptions.translate_fault(task_info.error) [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Faults: ['InvalidArgument'] [ 1450.413338] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] [ 1450.414335] env[61855]: INFO nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Terminating instance [ 1450.415350] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.415577] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1450.416261] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 
tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1450.416415] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1450.416649] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb97352d-504a-4239-913f-be88a76b3b9b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.418958] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bca7917-ec63-4fc5-b880-e75464572a90 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.425613] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1450.425832] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0629e202-6453-4474-9cb1-be142c41adbf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.427948] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1450.428136] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1450.429114] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54226ebd-80cf-43b6-8eba-c23847a9183f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.433625] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Waiting for the task: (returnval){ [ 1450.433625] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52778676-9be6-e231-d396-1e405ec11e22" [ 1450.433625] env[61855]: _type = "Task" [ 1450.433625] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.443886] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52778676-9be6-e231-d396-1e405ec11e22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.488523] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1450.488680] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1450.488857] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Deleting the datastore file [datastore2] 1f2b9ec1-5449-45a9-9691-857b15aaa9ff {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1450.489150] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c191c7ce-7911-4116-819d-a94f0d09299a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.495282] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Waiting for the task: (returnval){ [ 1450.495282] env[61855]: value = "task-4302921" [ 1450.495282] env[61855]: _type = "Task" [ 1450.495282] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.502909] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Task: {'id': task-4302921, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.943713] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1450.943983] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Creating directory with path [datastore2] vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1450.944230] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2ba5b77-1439-4368-88b8-e68308e7c992 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.955385] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Created directory with path [datastore2] vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1450.955579] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Fetch image to [datastore2] vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1450.955760] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1450.956549] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3742b7-c5fd-41ea-b0cb-507dd58ed2e7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.962867] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcd09e0-1244-48d6-ad84-f95a8594540a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.972010] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf67407-e9d7-453c-9a4a-592bdd1ac81c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.005348] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edd87b1-f6de-4a83-962b-b70d3b48c2a9 
{{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.011928] env[61855]: DEBUG oslo_vmware.api [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Task: {'id': task-4302921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076472} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.013350] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1451.013540] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1451.013712] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1451.013884] env[61855]: INFO nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Took 0.60 seconds to destroy the instance on the hypervisor. 
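The InvalidArgument/fileType failure above surfaces through oslo.vmware's task-polling machinery: the driver starts CopyVirtualDisk_Task, then wait_for_task() polls the task info (the "progress is 0%" entries) and raises VimFaultException once vCenter reports an error state, which is what the traceback ending in _poll_task shows. A minimal sketch of that invoke-and-wait pattern follows; it assumes a reachable vCenter, and the endpoint, credentials, and datastore paths are placeholders, not values from this log:

    from oslo_vmware import api as vmware_api

    def copy_virtual_disk(session, dc_ref, source_path, dest_path):
        """Start a CopyVirtualDisk_Task and block until vCenter finishes it."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        # wait_for_task() polls TaskInfo and raises VimFaultException on an
        # error state, e.g. the "A specified parameter was not correct:
        # fileType" fault recorded in this log.
        session.wait_for_task(task)

    # Placeholder endpoint and credentials; Nova reads the real values from
    # the [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

When wait_for_task() raises, the compute manager aborts the resource claim and reschedules the build, which is the sequence the surrounding entries record.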
[ 1451.015695] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8b6f8b6a-f3e2-48be-b43e-a14d30699abb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.017493] env[61855]: DEBUG nova.compute.claims [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1451.017679] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.017891] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.038654] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1451.092584] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1451.151813] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1451.152086] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1451.289021] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6c275e-3aad-493b-841a-095e32750051 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.296831] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f06663-4efd-431a-a066-4555c39a4455 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.326220] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebf2320-b9b4-4ed2-9455-c47d4f734683 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.333098] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5d881a-37ed-444e-b485-26ae7ac675f7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.345897] env[61855]: DEBUG nova.compute.provider_tree [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.375275] env[61855]: DEBUG nova.scheduler.client.report [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1451.389833] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.372s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.390424] env[61855]: ERROR nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1451.390424] env[61855]: Faults: ['InvalidArgument'] [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Traceback (most recent call last): [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1451.390424] 
env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self.driver.spawn(context, instance, image_meta, [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self._fetch_image_if_missing(context, vi) [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] image_cache(vi, tmp_image_ds_loc) [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] vm_util.copy_virtual_disk( [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] session._wait_for_task(vmdk_copy_task) [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] return self.wait_for_task(task_ref) [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] return evt.wait() [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] result = hub.switch() [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] return self.greenlet.switch() [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] self.f(*self.args, **self.kw) [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] raise exceptions.translate_fault(task_info.error) [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Faults: ['InvalidArgument'] [ 1451.390424] env[61855]: ERROR nova.compute.manager [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] [ 1451.391353] env[61855]: DEBUG nova.compute.utils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1451.392655] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Build of instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff was re-scheduled: A specified parameter was not correct: fileType [ 1451.392655] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1451.393048] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1451.393252] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1451.393442] env[61855]: DEBUG nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1451.393607] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1451.712295] env[61855]: DEBUG nova.network.neutron [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.724797] env[61855]: INFO nova.compute.manager [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Took 0.33 seconds to deallocate network for instance. [ 1451.832758] env[61855]: INFO nova.scheduler.client.report [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Deleted allocations for instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff [ 1451.854669] env[61855]: DEBUG oslo_concurrency.lockutils [None req-640bf088-548d-4b23-8263-e3a6a1176493 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 633.139s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.855847] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 435.599s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.856097] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Acquiring lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.856310] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.856506] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.859159] env[61855]: INFO nova.compute.manager [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Terminating instance [ 1451.860576] env[61855]: DEBUG nova.compute.manager [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1451.860795] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1451.861490] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e316bf1c-8b7f-44c6-9c16-631c716311df {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.870430] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085d2dd8-6a00-4f25-a514-d70233b72e71 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.885513] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1451.898990] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f2b9ec1-5449-45a9-9691-857b15aaa9ff could not be found. 
[ 1451.899572] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1451.899572] env[61855]: INFO nova.compute.manager [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1451.899722] env[61855]: DEBUG oslo.service.loopingcall [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.899893] env[61855]: DEBUG nova.compute.manager [-] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1451.899981] env[61855]: DEBUG nova.network.neutron [-] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1451.909942] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1451.935390] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "4b5afb22-8251-44d4-ad3f-43bbb5f0f1f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.322s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.938079] env[61855]: DEBUG nova.network.neutron [-] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.945947] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 07bb6cf5-b125-4e08-a7fc-9b1c358d7e17] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1451.949112] env[61855]: INFO nova.compute.manager [-] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] Took 0.05 seconds to deallocate network for instance. [ 1451.968078] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 07bb6cf5-b125-4e08-a7fc-9b1c358d7e17] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1451.987627] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "07bb6cf5-b125-4e08-a7fc-9b1c358d7e17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.348s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.998749] env[61855]: DEBUG nova.compute.manager [None req-2716f3c9-6e79-4628-b846-2f9f93420093 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 4bf2bcb4-fc3c-4051-9e38-128d65bd1c82] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1452.023665] env[61855]: DEBUG nova.compute.manager [None req-2716f3c9-6e79-4628-b846-2f9f93420093 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] [instance: 4bf2bcb4-fc3c-4051-9e38-128d65bd1c82] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1452.040354] env[61855]: DEBUG oslo_concurrency.lockutils [None req-0dec5ca7-7da3-4a52-a38a-c147b412910e tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.042023] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 318.758s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.042125] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 1f2b9ec1-5449-45a9-9691-857b15aaa9ff] During sync_power_state the instance has a pending task (deleting). Skip. [ 1452.042501] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "1f2b9ec1-5449-45a9-9691-857b15aaa9ff" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.053149] env[61855]: DEBUG oslo_concurrency.lockutils [None req-2716f3c9-6e79-4628-b846-2f9f93420093 tempest-AttachInterfacesTestJSON-532148952 tempest-AttachInterfacesTestJSON-532148952-project-member] Lock "4bf2bcb4-fc3c-4051-9e38-128d65bd1c82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.824s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.063243] env[61855]: DEBUG nova.compute.manager [None req-118829e0-8405-4900-a386-900d0c015ee2 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: 2b747db5-efe7-431d-8d6c-58fa54e2c111] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1452.088282] env[61855]: DEBUG nova.compute.manager [None req-118829e0-8405-4900-a386-900d0c015ee2 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] [instance: 2b747db5-efe7-431d-8d6c-58fa54e2c111] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1452.112948] env[61855]: DEBUG oslo_concurrency.lockutils [None req-118829e0-8405-4900-a386-900d0c015ee2 tempest-ImagesTestJSON-1812391808 tempest-ImagesTestJSON-1812391808-project-member] Lock "2b747db5-efe7-431d-8d6c-58fa54e2c111" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.621s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.121533] env[61855]: DEBUG nova.compute.manager [None req-4dff8e64-2c3c-424d-bbe2-4f2edd2b3d00 tempest-ServersNegativeTestMultiTenantJSON-1424545647 tempest-ServersNegativeTestMultiTenantJSON-1424545647-project-member] [instance: 13d55fae-9923-4451-961a-f75ef39f7548] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1452.147125] env[61855]: DEBUG nova.compute.manager [None req-4dff8e64-2c3c-424d-bbe2-4f2edd2b3d00 tempest-ServersNegativeTestMultiTenantJSON-1424545647 tempest-ServersNegativeTestMultiTenantJSON-1424545647-project-member] [instance: 13d55fae-9923-4451-961a-f75ef39f7548] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1452.167520] env[61855]: DEBUG oslo_concurrency.lockutils [None req-4dff8e64-2c3c-424d-bbe2-4f2edd2b3d00 tempest-ServersNegativeTestMultiTenantJSON-1424545647 tempest-ServersNegativeTestMultiTenantJSON-1424545647-project-member] Lock "13d55fae-9923-4451-961a-f75ef39f7548" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.772s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.179943] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1452.234918] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.235326] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.236912] env[61855]: INFO nova.compute.claims [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1452.449018] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a293b052-83d8-408a-95ce-0314b8f1cd30 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.455325] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99e7545-86ea-49ab-8b0b-4b60fb654c6b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.484872] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7438ac8-2cb1-42e6-938f-23f0519dc273 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.491741] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999d58ee-1aed-4d5b-bcbd-27d55b93d92d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.504841] env[61855]: DEBUG nova.compute.provider_tree [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1452.514296] env[61855]: DEBUG nova.scheduler.client.report [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1452.528945] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.294s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.529454] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1452.565053] env[61855]: DEBUG nova.compute.utils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1452.566092] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1452.566267] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1452.576914] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1452.628173] env[61855]: DEBUG nova.policy [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5c2a17d1334b36a2495d9b9bda5783', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c13499e461e24fc6964aa428afe66651', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1452.637022] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1452.662140] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1452.662388] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1452.662550] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1452.662835] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1452.662905] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1452.663022] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1452.663296] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1452.663472] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1452.663644] env[61855]: DEBUG 
nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1452.663810] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1452.663982] env[61855]: DEBUG nova.virt.hardware [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1452.664849] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71050251-28a0-4daf-895a-446775b2e8b3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.672481] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81e724b-8e31-4737-9ca7-f5d9e77343ca {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.950440] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Successfully created port: d0bdd73a-2016-46b8-a59f-161ef4134e46 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1453.626162] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Successfully updated port: d0bdd73a-2016-46b8-a59f-161ef4134e46 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1453.638475] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "refresh_cache-a1cac88d-1c85-4f4a-9527-1be4dc7dba21" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.638650] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "refresh_cache-a1cac88d-1c85-4f4a-9527-1be4dc7dba21" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.638804] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1453.682319] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db 
tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1453.750667] env[61855]: DEBUG nova.compute.manager [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Received event network-vif-plugged-d0bdd73a-2016-46b8-a59f-161ef4134e46 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1453.750940] env[61855]: DEBUG oslo_concurrency.lockutils [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] Acquiring lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.751235] env[61855]: DEBUG oslo_concurrency.lockutils [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.751340] env[61855]: DEBUG oslo_concurrency.lockutils [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1453.751492] env[61855]: DEBUG nova.compute.manager [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] No waiting events found dispatching network-vif-plugged-d0bdd73a-2016-46b8-a59f-161ef4134e46 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1453.751663] env[61855]: WARNING nova.compute.manager [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Received unexpected event network-vif-plugged-d0bdd73a-2016-46b8-a59f-161ef4134e46 for instance with vm_state building and task_state spawning. [ 1453.751827] env[61855]: DEBUG nova.compute.manager [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Received event network-changed-d0bdd73a-2016-46b8-a59f-161ef4134e46 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1453.751982] env[61855]: DEBUG nova.compute.manager [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Refreshing instance network info cache due to event network-changed-d0bdd73a-2016-46b8-a59f-161ef4134e46. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1453.752346] env[61855]: DEBUG oslo_concurrency.lockutils [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] Acquiring lock "refresh_cache-a1cac88d-1c85-4f4a-9527-1be4dc7dba21" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.844604] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Updating instance_info_cache with network_info: [{"id": "d0bdd73a-2016-46b8-a59f-161ef4134e46", "address": "fa:16:3e:80:4e:db", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0bdd73a-20", "ovs_interfaceid": "d0bdd73a-2016-46b8-a59f-161ef4134e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.857614] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "refresh_cache-a1cac88d-1c85-4f4a-9527-1be4dc7dba21" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.857871] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Instance network_info: |[{"id": "d0bdd73a-2016-46b8-a59f-161ef4134e46", "address": "fa:16:3e:80:4e:db", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0bdd73a-20", "ovs_interfaceid": "d0bdd73a-2016-46b8-a59f-161ef4134e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1453.858176] env[61855]: DEBUG oslo_concurrency.lockutils [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] Acquired lock "refresh_cache-a1cac88d-1c85-4f4a-9527-1be4dc7dba21" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.858357] env[61855]: DEBUG nova.network.neutron [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Refreshing network info cache for port d0bdd73a-2016-46b8-a59f-161ef4134e46 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1453.859429] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:4e:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1d468f87-964a-4fb6-bab3-b83f6f2646b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0bdd73a-2016-46b8-a59f-161ef4134e46', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.868507] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating folder: Project (c13499e461e24fc6964aa428afe66651). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1453.869550] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53a959b7-482d-49b0-a7fe-50eefc0e1543 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.883166] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created folder: Project (c13499e461e24fc6964aa428afe66651) in parent group-v847048. [ 1453.883349] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating folder: Instances. Parent ref: group-v847135. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1453.883573] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b50e70f-c00b-45e3-a568-d337cffcc351 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.892115] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created folder: Instances in parent group-v847135. 
[ 1453.892341] env[61855]: DEBUG oslo.service.loopingcall [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.892517] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1453.892706] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4347eaea-f23e-44b4-9a0a-a06bfde66967 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.914905] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.914905] env[61855]: value = "task-4302924" [ 1453.914905] env[61855]: _type = "Task" [ 1453.914905] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.922848] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302924, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.393071] env[61855]: DEBUG nova.network.neutron [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Updated VIF entry in instance network info cache for port d0bdd73a-2016-46b8-a59f-161ef4134e46. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1454.393458] env[61855]: DEBUG nova.network.neutron [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Updating instance_info_cache with network_info: [{"id": "d0bdd73a-2016-46b8-a59f-161ef4134e46", "address": "fa:16:3e:80:4e:db", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0bdd73a-20", "ovs_interfaceid": "d0bdd73a-2016-46b8-a59f-161ef4134e46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.403928] env[61855]: DEBUG oslo_concurrency.lockutils [req-bca66ac1-9c02-4175-9f62-ce00ed006f85 req-09068418-c1dc-4bb3-8829-1b880af2e284 service nova] Releasing lock "refresh_cache-a1cac88d-1c85-4f4a-9527-1be4dc7dba21" {{(pid=61855) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.424133] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302924, 'name': CreateVM_Task, 'duration_secs': 0.341275} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.424282] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1454.424890] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.425065] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.425377] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1454.425616] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6302874f-1c8a-4491-a23b-b5772d67d61b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.429728] env[61855]: DEBUG oslo_vmware.api [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 1454.429728] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52905c54-8ee8-e16b-6faa-5a62357727d2" [ 1454.429728] env[61855]: _type = "Task" [ 1454.429728] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.438037] env[61855]: DEBUG oslo_vmware.api [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52905c54-8ee8-e16b-6faa-5a62357727d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.941463] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.941641] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1454.941854] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.120786] env[61855]: DEBUG oslo_concurrency.lockutils [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.142568] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.143238] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.919413] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.924807] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.924888] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9951}} [ 1489.925034] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1489.950091] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950091] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950261] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950341] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950467] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950592] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950714] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950834] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.950952] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.951083] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1489.951208] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1489.951717] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.951897] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.952071] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.952211] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1490.925116] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1490.948222] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1491.924162] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1491.935696] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.936033] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.936082] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.936642] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing 
locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1491.937623] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ca1e91-1af9-4d0c-93e0-18f8b56c7717 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.946505] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76cce00-9dbc-4336-926c-ffcfc0b2ddf9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.961537] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4a79c4-f644-4103-a6ef-de8ecee2aeb5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.967677] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c954e4d-5843-4ea5-9729-54c1cfcbfbdb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.996366] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180675MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1491.996509] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.996702] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.068211] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance adfd94b5-7e03-49d1-a445-c58b296e5185 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.068851] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.068851] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.068851] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.068851] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.069054] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.069481] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.069481] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.069709] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.069709] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1492.081798] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1492.094483] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1492.104841] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1492.105060] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1492.105211] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1492.120988] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing inventories for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1492.135811] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating ProviderTree inventory for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1492.135986] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1492.146083] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing 
aggregate associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, aggregates: None {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1492.163413] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing trait associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1492.301838] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead72f05-ee5f-4b66-b71a-188b3d6c5851 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.309322] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d26095-8211-4618-80a2-7024d65c7081 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.339338] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fd5121-bb47-4c7e-bfff-4a5982dfba69 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.345978] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c251bcf3-575c-4b67-a152-76261964d5b9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.361121] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1492.371052] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1492.384072] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1492.384327] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.388s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.384685] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.923995] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1497.088414] env[61855]: WARNING oslo_vmware.rw_handles [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1497.088414] env[61855]: ERROR oslo_vmware.rw_handles [ 1497.089291] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1497.090761] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1497.090991] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Copying Virtual Disk [datastore2] vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/39fcb717-2f08-4be3-9364-9606cc4ba697/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1497.091306] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83465429-d195-45aa-8440-4670b88d6f89 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.099174] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Waiting for the task: (returnval){ [ 1497.099174] env[61855]: value = "task-4302925" [ 1497.099174] env[61855]: _type = "Task" [ 1497.099174] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.107949] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Task: {'id': task-4302925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.610142] env[61855]: DEBUG oslo_vmware.exceptions [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1497.610440] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.610987] env[61855]: ERROR nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1497.610987] env[61855]: Faults: ['InvalidArgument'] [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Traceback (most recent call last): [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] yield resources [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] self.driver.spawn(context, instance, image_meta, [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] 
self._fetch_image_if_missing(context, vi) [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] image_cache(vi, tmp_image_ds_loc) [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] vm_util.copy_virtual_disk( [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] session._wait_for_task(vmdk_copy_task) [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] return self.wait_for_task(task_ref) [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] return evt.wait() [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] result = hub.switch() [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] return self.greenlet.switch() [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] self.f(*self.args, **self.kw) [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] raise exceptions.translate_fault(task_info.error) [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Faults: ['InvalidArgument'] [ 1497.610987] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] [ 1497.611980] env[61855]: INFO nova.compute.manager [None 
req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Terminating instance [ 1497.612904] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.613912] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1497.613912] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cb19b1d-1ca9-4eac-9b58-9af4996dfd71 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.618018] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1497.618018] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1497.618018] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7c96d3-1f39-4d99-9d98-cb63096d94a9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.625102] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1497.625475] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67300c37-e818-4758-b361-d39a1ec37b27 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.627750] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1497.628047] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1497.629092] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86c7a36e-7fb0-4a13-a722-cee914a7158f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.633560] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Waiting for the task: (returnval){ [ 1497.633560] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a1fc94-5f12-ecf7-b31b-59752aef4bcb" [ 1497.633560] env[61855]: _type = "Task" [ 1497.633560] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.648530] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a1fc94-5f12-ecf7-b31b-59752aef4bcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.991065] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1497.991214] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1497.991404] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Deleting the datastore file [datastore2] adfd94b5-7e03-49d1-a445-c58b296e5185 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1497.991687] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dae3641-9026-420a-afa8-47d057c241d0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.998402] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Waiting for the task: (returnval){ [ 1497.998402] env[61855]: value = "task-4302927" [ 1497.998402] env[61855]: _type = "Task" [ 1497.998402] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.007508] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Task: {'id': task-4302927, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.146027] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1498.146386] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Creating directory with path [datastore2] vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1498.146690] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-515e1a0b-074e-4972-bae0-3ad7026c8596 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.161056] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Created directory with path [datastore2] vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1498.161056] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Fetch image to [datastore2] vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1498.161056] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1498.161352] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2236a715-7295-4184-9593-f46ce21ee911 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.167790] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15178a2d-85b6-46ca-8be5-cae2d90be322 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.177228] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20aa2ee8-f897-44b5-af2d-995a4c23251c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.208720] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1364f672-5619-4070-b67c-e3788f3a9582 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.215210] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5157bf07-1f63-4b55-9b91-407cc3de13dd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.235440] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1498.286035] env[61855]: DEBUG oslo_vmware.rw_handles [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1498.346386] env[61855]: DEBUG oslo_vmware.rw_handles [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1498.346588] env[61855]: DEBUG oslo_vmware.rw_handles [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1498.508537] env[61855]: DEBUG oslo_vmware.api [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Task: {'id': task-4302927, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075664} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.508736] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.508920] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1498.509155] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1498.509343] env[61855]: INFO nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Took 0.89 seconds to destroy the instance on the hypervisor. [ 1498.511545] env[61855]: DEBUG nova.compute.claims [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1498.511719] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.511934] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.730036] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244ee6cc-12bb-43c7-a3c4-bd1a83961401 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.738056] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4793b802-c05a-4cf1-a0d4-a4f2a090307a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.767525] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9038579-56e5-4076-a903-89b6dd04c771 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.774999] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d14bc2-8777-411e-9f22-77ca7c76501c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.789470] env[61855]: DEBUG nova.compute.provider_tree [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.797779] env[61855]: DEBUG nova.scheduler.client.report [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1498.811499] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.299s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.812035] env[61855]: ERROR nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1498.812035] env[61855]: Faults: ['InvalidArgument'] [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Traceback (most recent call last): [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] self.driver.spawn(context, instance, image_meta, [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] self._fetch_image_if_missing(context, vi) [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1498.812035] 
env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] image_cache(vi, tmp_image_ds_loc) [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] vm_util.copy_virtual_disk( [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] session._wait_for_task(vmdk_copy_task) [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] return self.wait_for_task(task_ref) [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] return evt.wait() [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] result = hub.switch() [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] return self.greenlet.switch() [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] self.f(*self.args, **self.kw) [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] raise exceptions.translate_fault(task_info.error) [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Faults: ['InvalidArgument'] [ 1498.812035] env[61855]: ERROR nova.compute.manager [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] [ 1498.812800] env[61855]: DEBUG nova.compute.utils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] VimFaultException {{(pid=61855) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1498.814089] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Build of instance adfd94b5-7e03-49d1-a445-c58b296e5185 was re-scheduled: A specified parameter was not correct: fileType [ 1498.814089] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1498.814456] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1498.814629] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1498.814843] env[61855]: DEBUG nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1498.815109] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1499.192639] env[61855]: DEBUG nova.network.neutron [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.207360] env[61855]: INFO nova.compute.manager [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Took 0.39 seconds to deallocate network for instance. 
[ 1499.331099] env[61855]: INFO nova.scheduler.client.report [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Deleted allocations for instance adfd94b5-7e03-49d1-a445-c58b296e5185 [ 1499.359219] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f4e75096-3c2f-4c98-88db-684680264f30 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.831s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.360559] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.193s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.360787] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Acquiring lock "adfd94b5-7e03-49d1-a445-c58b296e5185-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.360999] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.361191] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.363169] env[61855]: INFO nova.compute.manager [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Terminating instance [ 1499.364868] env[61855]: DEBUG nova.compute.manager [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1499.365082] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1499.365555] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9048bf82-68a5-478d-a0d8-6b8cf63bd2fc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.375207] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87915ae-318a-4994-8e9f-3c2bdd683ff8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.386354] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: 15d55991-fc11-45a0-ac77-1c00ce55dce7] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1499.408313] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance adfd94b5-7e03-49d1-a445-c58b296e5185 could not be found. [ 1499.408560] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1499.409334] env[61855]: INFO nova.compute.manager [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1499.409334] env[61855]: DEBUG oslo.service.loopingcall [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.409525] env[61855]: DEBUG nova.compute.manager [-] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1499.409525] env[61855]: DEBUG nova.network.neutron [-] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1499.414971] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: 15d55991-fc11-45a0-ac77-1c00ce55dce7] Instance disappeared before build. 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1499.435958] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "15d55991-fc11-45a0-ac77-1c00ce55dce7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.894s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.437612] env[61855]: DEBUG nova.network.neutron [-] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.446804] env[61855]: INFO nova.compute.manager [-] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] Took 0.04 seconds to deallocate network for instance. [ 1499.452188] env[61855]: DEBUG nova.compute.manager [None req-efb2a940-07fb-4b4f-9aad-593a2b7ab420 tempest-ServerShowV257Test-639370341 tempest-ServerShowV257Test-639370341-project-member] [instance: 15388773-61c4-4ca4-9f61-aec7c22f4081] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1499.475142] env[61855]: DEBUG nova.compute.manager [None req-efb2a940-07fb-4b4f-9aad-593a2b7ab420 tempest-ServerShowV257Test-639370341 tempest-ServerShowV257Test-639370341-project-member] [instance: 15388773-61c4-4ca4-9f61-aec7c22f4081] Instance disappeared before build. {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1499.500720] env[61855]: DEBUG oslo_concurrency.lockutils [None req-efb2a940-07fb-4b4f-9aad-593a2b7ab420 tempest-ServerShowV257Test-639370341 tempest-ServerShowV257Test-639370341-project-member] Lock "15388773-61c4-4ca4-9f61-aec7c22f4081" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.454s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.511713] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1499.563296] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ee5a9abb-20fe-4c16-acf8-ba8ec5af7807 tempest-ServerMetadataTestJSON-484224096 tempest-ServerMetadataTestJSON-484224096-project-member] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.203s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.564526] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 366.280s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.564526] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: adfd94b5-7e03-49d1-a445-c58b296e5185] During sync_power_state the instance has a pending task (deleting). Skip. [ 1499.564665] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "adfd94b5-7e03-49d1-a445-c58b296e5185" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.569735] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.570031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.571554] env[61855]: INFO nova.compute.claims [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1499.757021] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d472a1c0-60e2-45c7-a316-2a962a0f3553 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.764524] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1fd603-9139-4d0e-839f-1711ee004d8f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.794257] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0e99eb-8a2e-4924-a3f5-eae6e50cbcee {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.801264] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041e6642-5008-46e7-a2e3-d38bf7449d46 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.813947] env[61855]: DEBUG nova.compute.provider_tree [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.822501] env[61855]: DEBUG nova.scheduler.client.report [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1499.836160] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.266s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.836636] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1499.868120] env[61855]: DEBUG nova.compute.utils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1499.869545] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Allocating IP information in the background. 
{{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1499.869748] env[61855]: DEBUG nova.network.neutron [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1499.878242] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1499.923279] env[61855]: DEBUG nova.policy [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24e7161122834e938ca6156e3f8c2855', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1eb57982dd094432baccff494449adad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1499.942637] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1499.968878] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1499.969165] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1499.969334] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1499.969520] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1499.969668] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1499.969888] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1499.970197] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1499.970416] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1499.970634] env[61855]: DEBUG
nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1499.970821] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1499.971000] env[61855]: DEBUG nova.virt.hardware [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1499.971904] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb2eea1-2a90-40b4-8a06-0685f39d32a5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.980137] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fde8d9-d1fc-4f20-8952-8a49c23f4360 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.245853] env[61855]: DEBUG nova.network.neutron [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Successfully created port: 08e252ee-ced3-4a58-8db8-cd72c1305928 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1501.029178] env[61855]: DEBUG nova.network.neutron [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Successfully updated port: 08e252ee-ced3-4a58-8db8-cd72c1305928 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1501.039589] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "refresh_cache-db4efbf1-db2e-404b-90fb-57c6a56bf7c7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.039744] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "refresh_cache-db4efbf1-db2e-404b-90fb-57c6a56bf7c7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.039894] env[61855]: DEBUG nova.network.neutron [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1501.083217] env[61855]: DEBUG nova.network.neutron [None 
req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1501.228499] env[61855]: DEBUG nova.compute.manager [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Received event network-vif-plugged-08e252ee-ced3-4a58-8db8-cd72c1305928 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1501.229269] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] Acquiring lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.229269] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.229269] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.229490] env[61855]: DEBUG nova.compute.manager [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] No waiting events found dispatching network-vif-plugged-08e252ee-ced3-4a58-8db8-cd72c1305928 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1501.229490] env[61855]: WARNING nova.compute.manager [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Received unexpected event network-vif-plugged-08e252ee-ced3-4a58-8db8-cd72c1305928 for instance with vm_state building and task_state spawning. [ 1501.229627] env[61855]: DEBUG nova.compute.manager [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Received event network-changed-08e252ee-ced3-4a58-8db8-cd72c1305928 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1501.229781] env[61855]: DEBUG nova.compute.manager [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Refreshing instance network info cache due to event network-changed-08e252ee-ced3-4a58-8db8-cd72c1305928.
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1501.229951] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] Acquiring lock "refresh_cache-db4efbf1-db2e-404b-90fb-57c6a56bf7c7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.250848] env[61855]: DEBUG nova.network.neutron [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Updating instance_info_cache with network_info: [{"id": "08e252ee-ced3-4a58-8db8-cd72c1305928", "address": "fa:16:3e:65:db:b5", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08e252ee-ce", "ovs_interfaceid": "08e252ee-ced3-4a58-8db8-cd72c1305928", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.261744] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "refresh_cache-db4efbf1-db2e-404b-90fb-57c6a56bf7c7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.262035] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Instance network_info: |[{"id": "08e252ee-ced3-4a58-8db8-cd72c1305928", "address": "fa:16:3e:65:db:b5", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 
676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08e252ee-ce", "ovs_interfaceid": "08e252ee-ced3-4a58-8db8-cd72c1305928", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1501.262563] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] Acquired lock "refresh_cache-db4efbf1-db2e-404b-90fb-57c6a56bf7c7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.262747] env[61855]: DEBUG nova.network.neutron [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Refreshing network info cache for port 08e252ee-ced3-4a58-8db8-cd72c1305928 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1501.263881] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:db:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08e252ee-ced3-4a58-8db8-cd72c1305928', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1501.271797] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating folder: Project (1eb57982dd094432baccff494449adad). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1501.274860] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70bc38a5-8c43-4250-a7f6-5b5051db0e44 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.285308] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Created folder: Project (1eb57982dd094432baccff494449adad) in parent group-v847048. [ 1501.285491] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating folder: Instances. Parent ref: group-v847138. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1501.285713] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e469d46-e2b6-4e0b-a587-d7435d8cc33e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.294995] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Created folder: Instances in parent group-v847138. 
[ 1501.295325] env[61855]: DEBUG oslo.service.loopingcall [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1501.295438] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1501.295608] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9a90ea7-ee3a-4ccf-ac57-d90fbe868ce6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.316160] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1501.316160] env[61855]: value = "task-4302930" [ 1501.316160] env[61855]: _type = "Task" [ 1501.316160] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.323491] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302930, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.608638] env[61855]: DEBUG nova.network.neutron [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Updated VIF entry in instance network info cache for port 08e252ee-ced3-4a58-8db8-cd72c1305928. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1501.612534] env[61855]: DEBUG nova.network.neutron [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Updating instance_info_cache with network_info: [{"id": "08e252ee-ced3-4a58-8db8-cd72c1305928", "address": "fa:16:3e:65:db:b5", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08e252ee-ce", "ovs_interfaceid": "08e252ee-ced3-4a58-8db8-cd72c1305928", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.626296] env[61855]: DEBUG oslo_concurrency.lockutils [req-ee5cbc20-5551-4ba5-9494-aed699df5198 req-9a488c2d-783f-4880-9177-09bc7995e249 service nova] Releasing lock "refresh_cache-db4efbf1-db2e-404b-90fb-57c6a56bf7c7" {{(pid=61855) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.826326] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302930, 'name': CreateVM_Task, 'duration_secs': 0.302765} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.826486] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1501.827162] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.827339] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.827699] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1501.827975] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7da9f7a8-4da7-4216-a6da-f336401dc46e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.832767] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 1501.832767] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]525d47f0-6f6f-35b0-28ee-40a36d05c836" [ 1501.832767] env[61855]: _type = "Task" [ 1501.832767] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.840269] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]525d47f0-6f6f-35b0-28ee-40a36d05c836, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.343524] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.343833] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1502.343992] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.506161] env[61855]: WARNING oslo_vmware.rw_handles [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1545.506161] env[61855]: ERROR oslo_vmware.rw_handles [ 1545.506707] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1545.508886] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 
tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1545.509145] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Copying Virtual Disk [datastore2] vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/4261d217-bab4-4fd9-83f7-b44e8a924ed7/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1545.509437] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6940e746-27e0-4fef-b03d-5b02520a5e09 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.518417] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Waiting for the task: (returnval){ [ 1545.518417] env[61855]: value = "task-4302931" [ 1545.518417] env[61855]: _type = "Task" [ 1545.518417] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.526403] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Task: {'id': task-4302931, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.029950] env[61855]: DEBUG oslo_vmware.exceptions [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1546.030258] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.030879] env[61855]: ERROR nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1546.030879] env[61855]: Faults: ['InvalidArgument'] [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Traceback (most recent call last): [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] yield resources [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self.driver.spawn(context, instance, image_meta, [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self._fetch_image_if_missing(context, vi) [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] image_cache(vi, tmp_image_ds_loc) [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] vm_util.copy_virtual_disk( [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] session._wait_for_task(vmdk_copy_task) [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] return self.wait_for_task(task_ref) [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] return evt.wait() [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] result = hub.switch() [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] return self.greenlet.switch() [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self.f(*self.args, **self.kw) [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] raise exceptions.translate_fault(task_info.error) [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Faults: ['InvalidArgument'] [ 1546.030879] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] [ 1546.032075] env[61855]: INFO nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Terminating instance [ 1546.032866] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.033093] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.033335] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-1bf30424-6cda-48a9-9e8f-3eef63679005 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.035643] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1546.035857] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1546.036597] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a48de3-3684-46fc-a092-41d6b3f28bf3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.043837] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1546.044961] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b6efa24-4619-4220-8883-b90b4e5619fd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.046243] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.046419] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1546.047121] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c52eebd-ca37-4bc6-b8ec-11b6856acb4c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.052235] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Waiting for the task: (returnval){ [ 1546.052235] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5276f025-b600-5e43-ebb9-46339849fb72" [ 1546.052235] env[61855]: _type = "Task" [ 1546.052235] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.059442] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5276f025-b600-5e43-ebb9-46339849fb72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.117422] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1546.117682] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1546.117812] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Deleting the datastore file [datastore2] cc01e7e2-26c0-4936-9dec-edd5578fe1e1 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1546.118098] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6423a30-54cd-4daa-917b-b673540aad6f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.124440] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Waiting for the task: (returnval){ [ 1546.124440] env[61855]: value = "task-4302933" [ 1546.124440] env[61855]: _type = "Task" [ 1546.124440] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.131936] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Task: {'id': task-4302933, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.562901] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1546.563250] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Creating directory with path [datastore2] vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.563423] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ca12f1f-fa98-4ad6-9c73-367659d20ce7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.575015] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Created directory with path [datastore2] vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.575152] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Fetch image to [datastore2] vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1546.575317] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1546.576077] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a00173f-a8d6-4c5f-9f11-087ca21ac9cf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.583115] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05eb2436-046f-4909-8cb3-4d288684d07c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.591909] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb36fec0-6d16-4b90-a961-2c39c7c811c1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1546.623573] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ea75d5-3c54-4e68-89e9-e73174212dd4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.633798] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7c19af99-95a2-4bbd-823f-1e01236ef0f0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.635420] env[61855]: DEBUG oslo_vmware.api [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Task: {'id': task-4302933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07577} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.636185] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1546.636185] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1546.636185] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1546.636185] env[61855]: INFO nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1546.638401] env[61855]: DEBUG nova.compute.claims [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1546.638581] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.638811] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.660920] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1546.712596] env[61855]: DEBUG oslo_vmware.rw_handles [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1546.772369] env[61855]: DEBUG oslo_vmware.rw_handles [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1546.772640] env[61855]: DEBUG oslo_vmware.rw_handles [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1546.887498] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c69c876-0adb-4570-b506-a3d97492a751 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.895429] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa5b083-7316-415b-a289-bb319d996fd0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.924416] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0924438d-980d-490e-aedb-913538f1dcc4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.931326] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57648c3-8910-40c5-b3df-6d9d29249fae {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.943928] env[61855]: DEBUG nova.compute.provider_tree [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.952907] env[61855]: DEBUG nova.scheduler.client.report [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1546.967014] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.967542] env[61855]: ERROR nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1546.967542] env[61855]: Faults: ['InvalidArgument'] [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Traceback (most recent call last): [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self.driver.spawn(context, instance, image_meta, [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self._fetch_image_if_missing(context, vi) [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] image_cache(vi, tmp_image_ds_loc) [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] vm_util.copy_virtual_disk( [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] session._wait_for_task(vmdk_copy_task) [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] return self.wait_for_task(task_ref) [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] return evt.wait() [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] result = hub.switch() [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] return self.greenlet.switch() [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] self.f(*self.args, **self.kw) [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: 
cc01e7e2-26c0-4936-9dec-edd5578fe1e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] raise exceptions.translate_fault(task_info.error) [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Faults: ['InvalidArgument'] [ 1546.967542] env[61855]: ERROR nova.compute.manager [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] [ 1546.968492] env[61855]: DEBUG nova.compute.utils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1546.969859] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Build of instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 was re-scheduled: A specified parameter was not correct: fileType [ 1546.969859] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1546.970242] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1546.970417] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1546.970594] env[61855]: DEBUG nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1546.970835] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1547.613149] env[61855]: DEBUG nova.network.neutron [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.626750] env[61855]: INFO nova.compute.manager [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Took 0.66 seconds to deallocate network for instance. [ 1547.740535] env[61855]: INFO nova.scheduler.client.report [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Deleted allocations for instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 [ 1547.763150] env[61855]: DEBUG oslo_concurrency.lockutils [None req-936f70ba-960a-468d-a924-3ffee0fb1ac2 tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 622.586s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.764326] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.610s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.764565] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Acquiring lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.764776] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock 
"cc01e7e2-26c0-4936-9dec-edd5578fe1e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.764946] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.767017] env[61855]: INFO nova.compute.manager [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Terminating instance [ 1547.768802] env[61855]: DEBUG nova.compute.manager [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1547.769049] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1547.769528] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aee6bc5d-b6fe-4024-b56e-4e454d2f3b22 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.777029] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1547.783553] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a43b73-2b21-478c-91a0-6cb1a1fbae72 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.815939] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cc01e7e2-26c0-4936-9dec-edd5578fe1e1 could not be found. 
[ 1547.816179] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1547.816360] env[61855]: INFO nova.compute.manager [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1547.816605] env[61855]: DEBUG oslo.service.loopingcall [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.819069] env[61855]: DEBUG nova.compute.manager [-] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1547.819069] env[61855]: DEBUG nova.network.neutron [-] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1547.833490] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.833726] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.835163] env[61855]: INFO nova.compute.claims [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1547.860523] env[61855]: DEBUG nova.network.neutron [-] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.876260] env[61855]: INFO nova.compute.manager [-] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] Took 0.06 seconds to deallocate network for instance. 
[ 1547.962258] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d8b53179-c605-46f4-b614-45983ad5306a tempest-ServerRescueNegativeTestJSON-1903703262 tempest-ServerRescueNegativeTestJSON-1903703262-project-member] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.963185] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 414.679s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.963396] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cc01e7e2-26c0-4936-9dec-edd5578fe1e1] During sync_power_state the instance has a pending task (deleting). Skip. [ 1547.963576] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "cc01e7e2-26c0-4936-9dec-edd5578fe1e1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.019760] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d896b2ab-2d82-42cc-9653-f471360dae9a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.027392] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6997ee9-0f78-4ef5-9d5e-bf64154708ad {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.058042] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3b47d4-71af-4445-af68-d3260feae1cc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.064534] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6fccf8-35ce-49d6-b74f-74d7bf2d60d8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.077311] env[61855]: DEBUG nova.compute.provider_tree [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.085728] env[61855]: DEBUG nova.scheduler.client.report [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1548.099065] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.265s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.099529] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1548.130672] env[61855]: DEBUG nova.compute.utils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1548.132237] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1548.132237] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1548.140435] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1548.188975] env[61855]: DEBUG nova.policy [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4de3c0111ab43cc8c4514e3f1bd581f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4605a1ddcbd74806b1538915f5c76000', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1548.201332] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1548.226824] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1548.227097] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1548.227298] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1548.227497] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1548.227652] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1548.227805] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1548.228015] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1548.228192] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1548.228365] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1548.228531] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1548.228716] env[61855]: DEBUG nova.virt.hardware [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1548.229609] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a634a7-041e-4611-b12c-d1f938af4ac8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.237926] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c9ebf1-a757-40ac-9b63-de89e410b529 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.524902] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Successfully created port: b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1549.327508] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Successfully updated port: b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1549.342891] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "refresh_cache-8653615e-3254-436e-984d-e52fdfb86ce4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.343060] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquired lock "refresh_cache-8653615e-3254-436e-984d-e52fdfb86ce4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.343218] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Building network info cache for instance {{(pid=61855) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1549.389406] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1549.555238] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Updating instance_info_cache with network_info: [{"id": "b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f", "address": "fa:16:3e:a6:dc:0e", "network": {"id": "ba027f6c-a8d1-4a7f-971a-955d325ac6b0", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-27676932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4605a1ddcbd74806b1538915f5c76000", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d6e4e2-1b", "ovs_interfaceid": "b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.565911] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Releasing lock "refresh_cache-8653615e-3254-436e-984d-e52fdfb86ce4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.566219] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Instance network_info: |[{"id": "b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f", "address": "fa:16:3e:a6:dc:0e", "network": {"id": "ba027f6c-a8d1-4a7f-971a-955d325ac6b0", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-27676932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4605a1ddcbd74806b1538915f5c76000", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": 
"nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d6e4e2-1b", "ovs_interfaceid": "b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1549.566626] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:dc:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9c4edd5-d88e-4996-afea-00130ace0dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1549.574600] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Creating folder: Project (4605a1ddcbd74806b1538915f5c76000). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1549.575225] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c62f7dfa-be0a-4512-86b2-fef92e9a4aa1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.585692] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Created folder: Project (4605a1ddcbd74806b1538915f5c76000) in parent group-v847048. [ 1549.585909] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Creating folder: Instances. Parent ref: group-v847141. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1549.586115] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4921ae4-1f87-4b40-b924-73930ed435e8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.594629] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Created folder: Instances in parent group-v847141. [ 1549.594849] env[61855]: DEBUG oslo.service.loopingcall [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1549.595029] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1549.595242] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e1cb9ee-53e6-4c48-99c2-03a3b47a0e0c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.613243] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1549.613243] env[61855]: value = "task-4302936" [ 1549.613243] env[61855]: _type = "Task" [ 1549.613243] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.620368] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302936, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.722055] env[61855]: DEBUG nova.compute.manager [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Received event network-vif-plugged-b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1549.722376] env[61855]: DEBUG oslo_concurrency.lockutils [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] Acquiring lock "8653615e-3254-436e-984d-e52fdfb86ce4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.722489] env[61855]: DEBUG oslo_concurrency.lockutils [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] Lock "8653615e-3254-436e-984d-e52fdfb86ce4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.722644] env[61855]: DEBUG oslo_concurrency.lockutils [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] Lock "8653615e-3254-436e-984d-e52fdfb86ce4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.722864] env[61855]: DEBUG nova.compute.manager [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] No waiting events found dispatching network-vif-plugged-b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1549.723026] env[61855]: WARNING nova.compute.manager [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Received unexpected event network-vif-plugged-b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f for instance with vm_state building and task_state spawning. 
[ 1549.723196] env[61855]: DEBUG nova.compute.manager [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Received event network-changed-b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1549.723355] env[61855]: DEBUG nova.compute.manager [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Refreshing instance network info cache due to event network-changed-b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1549.723538] env[61855]: DEBUG oslo_concurrency.lockutils [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] Acquiring lock "refresh_cache-8653615e-3254-436e-984d-e52fdfb86ce4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.723673] env[61855]: DEBUG oslo_concurrency.lockutils [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] Acquired lock "refresh_cache-8653615e-3254-436e-984d-e52fdfb86ce4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.724038] env[61855]: DEBUG nova.network.neutron [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Refreshing network info cache for port b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1549.919061] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1549.923794] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1550.079192] env[61855]: DEBUG nova.network.neutron [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Updated VIF entry in instance network info cache for port b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1550.079550] env[61855]: DEBUG nova.network.neutron [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Updating instance_info_cache with network_info: [{"id": "b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f", "address": "fa:16:3e:a6:dc:0e", "network": {"id": "ba027f6c-a8d1-4a7f-971a-955d325ac6b0", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-27676932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4605a1ddcbd74806b1538915f5c76000", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d6e4e2-1b", "ovs_interfaceid": "b2d6e4e2-1b0b-4d95-a03f-9d5858616a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.089664] env[61855]: DEBUG oslo_concurrency.lockutils [req-aa95abd9-c6db-4d37-a447-4d1128425177 req-2a74f35f-7d6b-4897-8072-258f0b9f0d99 service nova] Releasing lock "refresh_cache-8653615e-3254-436e-984d-e52fdfb86ce4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.123978] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302936, 'name': CreateVM_Task, 'duration_secs': 0.284938} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.124165] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1550.124826] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.124988] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.125333] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1550.125591] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3528a43e-51fe-40e6-8bea-7ed76cb0869b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.130329] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Waiting for the task: (returnval){ [ 1550.130329] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a1857d-bdb3-0735-fc19-f6e32fbdc3f9" [ 1550.130329] env[61855]: _type = "Task" [ 1550.130329] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.138760] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52a1857d-bdb3-0735-fc19-f6e32fbdc3f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.641067] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.641067] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1550.641067] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.924434] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1550.924648] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1550.924771] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1550.950456] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.950661] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.950833] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.950969] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.951111] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.951232] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.951351] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.951471] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.951588] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.951706] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1550.951827] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1550.952350] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1550.952494] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1551.924300] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.924742] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.924242] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.924635] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.936726] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.936946] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.937128] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.937288] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1553.938466] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4b844a-72a0-4ccc-8d5d-bb326ded1326 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.947138] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb44d73a-935f-4cbd-b668-75fbc9097bc9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.962084] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1460315-f249-4ad5-a8cc-f5f4d7c5e7bc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.968192] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5b1615-a18f-48e8-9428-34c19c2ecbc5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.996491] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180632MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1553.996631] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.996815] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.075044] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa9a75c4-371f-407e-a79e-133606a9fabc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075044] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075044] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075247] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075286] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075389] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075506] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075619] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075730] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.075845] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1554.086850] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1554.087080] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1554.087232] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1554.215112] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17439d1f-6c8c-4450-9bbb-05ad73c39024 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.222653] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95db00e1-7840-4a97-889a-3cbe2e3b027f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.253472] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86858d93-6d59-4b99-ad75-0870aee899db {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.260355] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45475076-d9e8-45de-b88e-e9be00dff8ee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.274646] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.283437] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1554.301307] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1554.301502] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.305s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.301633] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1583.958801] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.958801] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.676592] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.676869] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.416994] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.934662] env[61855]: WARNING oslo_vmware.rw_handles [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 
1593.934662] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1593.934662] env[61855]: ERROR oslo_vmware.rw_handles [ 1593.935452] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1593.937093] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1593.937323] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Copying Virtual Disk [datastore2] vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/2aa00979-96c9-4d1d-bacb-e6491fa67407/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1593.937614] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de083d90-82e8-4acb-be90-c6ed5bda1604 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.946948] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Waiting for the task: (returnval){ [ 1593.946948] env[61855]: value = "task-4302937" [ 1593.946948] env[61855]: _type = "Task" [ 1593.946948] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.955558] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Task: {'id': task-4302937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.458587] env[61855]: DEBUG oslo_vmware.exceptions [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1594.458886] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.459485] env[61855]: ERROR nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.459485] env[61855]: Faults: ['InvalidArgument'] [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Traceback (most recent call last): [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] yield resources [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] self.driver.spawn(context, instance, image_meta, [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] self._fetch_image_if_missing(context, vi) [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] image_cache(vi, tmp_image_ds_loc) [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] vm_util.copy_virtual_disk( [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] session._wait_for_task(vmdk_copy_task) [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: 
aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] return self.wait_for_task(task_ref) [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] return evt.wait() [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] result = hub.switch() [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] return self.greenlet.switch() [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] self.f(*self.args, **self.kw) [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] raise exceptions.translate_fault(task_info.error) [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Faults: ['InvalidArgument'] [ 1594.459485] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] [ 1594.460589] env[61855]: INFO nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Terminating instance [ 1594.461364] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.461572] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.461803] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-655fe750-b3e9-4736-affb-e6384d4d0ed9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.465173] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1594.465320] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1594.466038] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd75a7c-16da-4feb-a7bd-757f7878a2a7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.469403] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.469579] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1594.470541] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c830566-ec6a-4bd9-a8dd-cd703fdb0058 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.474198] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1594.474667] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e66d9c1-d938-4c12-a985-4611bbc84167 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.476796] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Waiting for the task: (returnval){ [ 1594.476796] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5276b15b-c275-ea31-77b6-6946693181cb" [ 1594.476796] env[61855]: _type = "Task" [ 1594.476796] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.483959] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5276b15b-c275-ea31-77b6-6946693181cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.539608] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1594.539821] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1594.540013] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Deleting the datastore file [datastore2] aa9a75c4-371f-407e-a79e-133606a9fabc {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1594.540335] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6390bb05-964e-491e-9737-f3b201faa038 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.546784] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Waiting for the task: (returnval){ [ 1594.546784] env[61855]: value = "task-4302939" [ 1594.546784] env[61855]: _type = "Task" [ 1594.546784] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.554145] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Task: {'id': task-4302939, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.990057] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1594.990057] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Creating directory with path [datastore2] vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.990057] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b70f3d3c-b9bc-45bc-b2d4-cfdf4131fe95 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.000848] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Created directory with path [datastore2] vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1595.001061] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Fetch image to [datastore2] vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1595.001251] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1595.002066] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ab8be5-a28c-4d5a-af03-4dfacd65daaf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.008345] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5c288b-962c-4111-a18b-6548c0049893 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.017194] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9808a1-86fe-4539-8ba2-d9128d25c13e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.047185] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1b1bd538-3fb4-43f2-a478-c45e03c98611 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.057852] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-04334846-18ba-49f2-ac0d-313d5b6cacd8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.059456] env[61855]: DEBUG oslo_vmware.api [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Task: {'id': task-4302939, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072483} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.059695] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1595.059879] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1595.060069] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1595.060271] env[61855]: INFO nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Took 0.59 seconds to destroy the instance on the hypervisor. 
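The task-wait cycle above (a CopyVirtualDisk_Task or DeleteDatastoreFile_Task is submitted, then polled at "progress is 0%" until it completes with a duration_secs or raises a fault) follows a simple pattern. Below is a minimal illustrative poller, assuming a hypothetical get_task_info() callable in place of the real VIM PropertyCollector read; it is a sketch of the pattern, not oslo.vmware's implementation:

```python
# Illustrative sketch of the wait_for_task/_poll_task cycle shown above.
# This is NOT oslo.vmware's code: get_task_info() is a hypothetical
# stand-in for the PropertyCollector read of the task's TaskInfo.
import time

class TaskFailed(Exception):
    """Terminal error state, e.g. the InvalidArgument fault above."""

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll until the task reaches 'success' or 'error'.

    Assumes get_task_info(task_id) returns an object with .state in
    {'queued', 'running', 'success', 'error'}, plus .progress and .error.
    """
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            return info          # carries e.g. duration_secs on completion
        if info.state == "error":
            raise TaskFailed(info.error)  # translated to a fault class above
        # between polls the caller logs "... progress is N%."
        time.sleep(poll_interval)
```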
[ 1595.062729] env[61855]: DEBUG nova.compute.claims [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1595.062904] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.063131] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.080986] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1595.135308] env[61855]: DEBUG oslo_vmware.rw_handles [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1595.195331] env[61855]: DEBUG oslo_vmware.rw_handles [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1595.195594] env[61855]: DEBUG oslo_vmware.rw_handles [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1595.335175] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a636fef-323b-4b8a-a831-e76e50d9ac11 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.342796] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1cf106-2343-4cb8-9474-bc6050d7048a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.373348] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b767a643-b706-4783-97a3-e0b200e41c76 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.379935] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb518a93-e89f-454a-b563-24dac85d4581 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.392463] env[61855]: DEBUG nova.compute.provider_tree [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.403064] env[61855]: DEBUG nova.scheduler.client.report [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1595.418206] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.355s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.418799] env[61855]: ERROR nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1595.418799] env[61855]: Faults: ['InvalidArgument'] [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Traceback (most recent call last): [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: 
aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] self.driver.spawn(context, instance, image_meta, [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] self._fetch_image_if_missing(context, vi) [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] image_cache(vi, tmp_image_ds_loc) [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] vm_util.copy_virtual_disk( [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] session._wait_for_task(vmdk_copy_task) [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] return self.wait_for_task(task_ref) [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] return evt.wait() [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] result = hub.switch() [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] return self.greenlet.switch() [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] 
self.f(*self.args, **self.kw) [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] raise exceptions.translate_fault(task_info.error) [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Faults: ['InvalidArgument'] [ 1595.418799] env[61855]: ERROR nova.compute.manager [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] [ 1595.419825] env[61855]: DEBUG nova.compute.utils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1595.421139] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Build of instance aa9a75c4-371f-407e-a79e-133606a9fabc was re-scheduled: A specified parameter was not correct: fileType [ 1595.421139] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1595.421532] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1595.421712] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1595.421885] env[61855]: DEBUG nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1595.422061] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1595.838662] env[61855]: DEBUG nova.network.neutron [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.851758] env[61855]: INFO nova.compute.manager [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Took 0.43 seconds to deallocate network for instance. [ 1595.980126] env[61855]: INFO nova.scheduler.client.report [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Deleted allocations for instance aa9a75c4-371f-407e-a79e-133606a9fabc [ 1596.003997] env[61855]: DEBUG oslo_concurrency.lockutils [None req-83e76d09-b290-49f4-bd9a-16b8c58f5e4f tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 622.222s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.004764] env[61855]: DEBUG oslo_concurrency.lockutils [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.942s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.006258] env[61855]: DEBUG oslo_concurrency.lockutils [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Acquiring lock "aa9a75c4-371f-407e-a79e-133606a9fabc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.006258] env[61855]: DEBUG oslo_concurrency.lockutils [None req-91a225f1-154e-47d7-b088-37f9d4539732 
tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.006258] env[61855]: DEBUG oslo_concurrency.lockutils [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.007954] env[61855]: INFO nova.compute.manager [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Terminating instance [ 1596.012393] env[61855]: DEBUG nova.compute.manager [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1596.012393] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1596.012637] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6e8236d-88b4-4658-ab0d-2bd86236f467 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.018607] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1596.025474] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06114b1-76a4-45b6-b777-66622ab1ae31 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.054409] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa9a75c4-371f-407e-a79e-133606a9fabc could not be found. 
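The lock bookkeeping above ("acquired ... waited 425.942s", "released ... held 622.222s") is emitted by oslo.concurrency's lock wrappers, which serialize build, terminate, and resource-tracker operations per instance or per resource. A minimal sketch of the same serialization pattern; the lock name and function below are illustrative, not Nova's actual resource-tracker code:

```python
# Sketch of the serialization pattern behind the "compute_resources" and
# per-instance lock entries above, using oslo.concurrency.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_available_resource():
    # The synchronized wrapper is what logs the
    #   Lock "..." acquired by "..." :: waited Ns
    #   Lock "..." "released" by "..." :: held Ns
    # DEBUG lines seen throughout this log.
    pass

# Taking the lock explicitly as a context manager produces the plain
# Acquired/Releasing lines instead (lockutils.py:313 and :331 above):
with lockutils.lock("compute_resources"):
    pass
```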
[ 1596.054617] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1596.054801] env[61855]: INFO nova.compute.manager [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1596.055061] env[61855]: DEBUG oslo.service.loopingcall [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1596.057394] env[61855]: DEBUG nova.compute.manager [-] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1596.057499] env[61855]: DEBUG nova.network.neutron [-] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1596.071887] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.072163] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.073715] env[61855]: INFO nova.compute.claims [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.084420] env[61855]: DEBUG nova.network.neutron [-] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.096075] env[61855]: INFO nova.compute.manager [-] [instance: aa9a75c4-371f-407e-a79e-133606a9fabc] Took 0.04 seconds to deallocate network for instance. 
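The inventory payloads repeated in this log fix the node's schedulable capacity. A short worked computation, assuming placement's usual formula capacity = (total - reserved) * allocation_ratio (max_unit then caps any single allocation, e.g. 16 VCPUs per instance here):

```python
# Worked example of the effective capacity implied by the inventory data
# logged above, assuming placement's standard formula:
#     capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable")
# VCPU: 192 schedulable      (48 physical cores oversubscribed 4x)
# MEMORY_MB: 196078 schedulable
# DISK_GB: 210 schedulable
```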
[ 1596.192645] env[61855]: DEBUG oslo_concurrency.lockutils [None req-91a225f1-154e-47d7-b088-37f9d4539732 tempest-FloatingIPsAssociationNegativeTestJSON-339774653 tempest-FloatingIPsAssociationNegativeTestJSON-339774653-project-member] Lock "aa9a75c4-371f-407e-a79e-133606a9fabc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.281076] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c2504a-c34e-4eda-971a-f5804d264ca9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.288327] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3dd0423-a5d3-4fe6-bdf2-4d335672df29 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.321205] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f4581d-e6e9-41c2-88fe-1e98d8127a73 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.329345] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24599ad-7ef1-47bf-976e-ac0247990dee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.343195] env[61855]: DEBUG nova.compute.provider_tree [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1596.353876] env[61855]: DEBUG nova.scheduler.client.report [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1596.376345] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.376931] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1596.421059] env[61855]: DEBUG nova.compute.utils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1596.422835] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1596.422835] env[61855]: DEBUG nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1596.434311] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1596.497750] env[61855]: DEBUG nova.policy [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6555bca994941ef876a79f963c827e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e855a24a69fb43d8955f7f8fe8cbb3da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1596.501355] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1596.536123] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1596.536405] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1596.537025] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1596.537025] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1596.537025] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1596.537193] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1596.537279] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1596.537432] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1596.537600] 
env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1596.537763] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1596.537935] env[61855]: DEBUG nova.virt.hardware [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1596.539079] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1269881f-67a9-41e8-8c03-4f4be3afcf71 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.546683] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcd3a29-3084-482d-89b1-c0ca32ca7847 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.852919] env[61855]: DEBUG nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Successfully created port: 4b0284d2-8bd1-4678-9c11-a5d0a17b7431 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1597.509143] env[61855]: DEBUG nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Successfully updated port: 4b0284d2-8bd1-4678-9c11-a5d0a17b7431 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1597.525653] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "refresh_cache-d7d51668-c93b-4db2-ab7c-10345258fbc7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.525653] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "refresh_cache-d7d51668-c93b-4db2-ab7c-10345258fbc7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.525653] env[61855]: DEBUG nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1597.598704] env[61855]: DEBUG 
nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1597.911719] env[61855]: DEBUG nova.compute.manager [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Received event network-vif-plugged-4b0284d2-8bd1-4678-9c11-a5d0a17b7431 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1597.911954] env[61855]: DEBUG oslo_concurrency.lockutils [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] Acquiring lock "d7d51668-c93b-4db2-ab7c-10345258fbc7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.912173] env[61855]: DEBUG oslo_concurrency.lockutils [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.912343] env[61855]: DEBUG oslo_concurrency.lockutils [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.912517] env[61855]: DEBUG nova.compute.manager [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] No waiting events found dispatching network-vif-plugged-4b0284d2-8bd1-4678-9c11-a5d0a17b7431 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1597.912723] env[61855]: WARNING nova.compute.manager [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Received unexpected event network-vif-plugged-4b0284d2-8bd1-4678-9c11-a5d0a17b7431 for instance with vm_state building and task_state spawning. [ 1597.912859] env[61855]: DEBUG nova.compute.manager [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Received event network-changed-4b0284d2-8bd1-4678-9c11-a5d0a17b7431 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1597.913037] env[61855]: DEBUG nova.compute.manager [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Refreshing instance network info cache due to event network-changed-4b0284d2-8bd1-4678-9c11-a5d0a17b7431. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1597.913214] env[61855]: DEBUG oslo_concurrency.lockutils [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] Acquiring lock "refresh_cache-d7d51668-c93b-4db2-ab7c-10345258fbc7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.954536] env[61855]: DEBUG nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Updating instance_info_cache with network_info: [{"id": "4b0284d2-8bd1-4678-9c11-a5d0a17b7431", "address": "fa:16:3e:a4:b1:10", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b0284d2-8b", "ovs_interfaceid": "4b0284d2-8bd1-4678-9c11-a5d0a17b7431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.965327] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "refresh_cache-d7d51668-c93b-4db2-ab7c-10345258fbc7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.965640] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Instance network_info: |[{"id": "4b0284d2-8bd1-4678-9c11-a5d0a17b7431", "address": "fa:16:3e:a4:b1:10", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 
815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b0284d2-8b", "ovs_interfaceid": "4b0284d2-8bd1-4678-9c11-a5d0a17b7431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1597.965952] env[61855]: DEBUG oslo_concurrency.lockutils [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] Acquired lock "refresh_cache-d7d51668-c93b-4db2-ab7c-10345258fbc7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.966151] env[61855]: DEBUG nova.network.neutron [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Refreshing network info cache for port 4b0284d2-8bd1-4678-9c11-a5d0a17b7431 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1597.967189] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:b1:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b0284d2-8bd1-4678-9c11-a5d0a17b7431', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1597.975806] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating folder: Project (e855a24a69fb43d8955f7f8fe8cbb3da). Parent ref: group-v847048. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1597.976779] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af1a8acf-8c22-47fd-92ec-cbd0c1fd212b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.989963] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Created folder: Project (e855a24a69fb43d8955f7f8fe8cbb3da) in parent group-v847048. [ 1597.990179] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating folder: Instances. Parent ref: group-v847144. {{(pid=61855) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1597.990428] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3c31608-125c-42e7-a599-7cece945760b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.998850] env[61855]: INFO nova.virt.vmwareapi.vm_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Created folder: Instances in parent group-v847144. 
[ 1597.999088] env[61855]: DEBUG oslo.service.loopingcall [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1597.999262] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1597.999448] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b27a4a5-9ef5-46d1-af4e-fa1b25455b13 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.019446] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1598.019446] env[61855]: value = "task-4302942" [ 1598.019446] env[61855]: _type = "Task" [ 1598.019446] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.026759] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302942, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.257312] env[61855]: DEBUG nova.network.neutron [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Updated VIF entry in instance network info cache for port 4b0284d2-8bd1-4678-9c11-a5d0a17b7431. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1598.257878] env[61855]: DEBUG nova.network.neutron [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Updating instance_info_cache with network_info: [{"id": "4b0284d2-8bd1-4678-9c11-a5d0a17b7431", "address": "fa:16:3e:a4:b1:10", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b0284d2-8b", "ovs_interfaceid": "4b0284d2-8bd1-4678-9c11-a5d0a17b7431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.270610] env[61855]: DEBUG oslo_concurrency.lockutils [req-6fdf3d19-7a5c-4a96-87dc-51e56d6286fd req-92a2aab9-22dd-4d08-9a0e-150adf552e63 service nova] Releasing lock "refresh_cache-d7d51668-c93b-4db2-ab7c-10345258fbc7" {{(pid=61855) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.528808] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302942, 'name': CreateVM_Task, 'duration_secs': 0.276893} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.529247] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1598.529919] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.530114] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.530460] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1598.530706] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4778df2f-7f77-4a2b-8049-df6906acdf34 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.534829] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 1598.534829] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52369947-3a4e-4671-30e8-55f39dbd7ade" [ 1598.534829] env[61855]: _type = "Task" [ 1598.534829] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.542073] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52369947-3a4e-4671-30e8-55f39dbd7ade, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.045066] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.045289] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1599.045500] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.919430] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.918572] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1610.943828] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.924058] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.924243] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1612.925361] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1612.925712] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1612.925712] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1612.946475] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.946475] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.946644] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.946644] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.948092] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.948092] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.948092] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.948092] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.948092] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.948092] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1612.948092] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1612.948092] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.924422] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.924675] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.936789] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.937107] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.937154] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.937289] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1613.938407] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1e40a6-bdb1-4c13-a118-110ac961255c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.947185] env[61855]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e158b979-a58c-44ba-a464-edd826444b3f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.962425] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a3e8b6-abd9-4360-b24d-42b25512c362 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.968285] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99abae2f-dfe3-4bda-83e8-827e627f4b6d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.996880] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180687MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1613.997033] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.997228] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.069934] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 6fbd649d-1fce-440f-9911-09b74df51489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.070121] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.070254] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.070379] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.070542] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.070654] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.070776] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.070893] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.071017] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.071152] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1614.085445] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.095742] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.095956] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1614.096176] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1614.230561] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82223737-279c-47c5-bc7a-d41a8ee45b35 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.238126] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab0ae98-2dfd-4b33-bde4-bc8a070a82cf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.268110] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe0a3ea-0554-48d5-a19a-cde3e105b719 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.274770] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0c4ea1-b6be-4a23-8d86-5796071f095c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.287345] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1614.296722] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1614.310605] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1614.310797] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.314s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.310546] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1615.310883] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.678120] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "8653615e-3254-436e-984d-e52fdfb86ce4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.670185] env[61855]: WARNING oslo_vmware.rw_handles [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1642.670185] env[61855]: ERROR oslo_vmware.rw_handles [ 1642.670739] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1642.672812] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1642.674038] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 
tempest-ServerPasswordTestJSON-1530707316-project-member] Copying Virtual Disk [datastore2] vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/8cc3250a-2d81-4a39-b084-ad0dba2867a2/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1642.674038] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fa268d1-e678-4f3f-8375-8b372fe53b1c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.681273] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Waiting for the task: (returnval){ [ 1642.681273] env[61855]: value = "task-4302943" [ 1642.681273] env[61855]: _type = "Task" [ 1642.681273] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.688958] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Task: {'id': task-4302943, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.192148] env[61855]: DEBUG oslo_vmware.exceptions [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1643.192442] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.193017] env[61855]: ERROR nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1643.193017] env[61855]: Faults: ['InvalidArgument'] [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Traceback (most recent call last): [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] yield resources [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self.driver.spawn(context, instance, image_meta, [ 1643.193017] 
env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self._fetch_image_if_missing(context, vi) [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] image_cache(vi, tmp_image_ds_loc) [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] vm_util.copy_virtual_disk( [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] session._wait_for_task(vmdk_copy_task) [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] return self.wait_for_task(task_ref) [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] return evt.wait() [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] result = hub.switch() [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] return self.greenlet.switch() [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self.f(*self.args, **self.kw) [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1643.193017] env[61855]: ERROR nova.compute.manager 
[instance: 6fbd649d-1fce-440f-9911-09b74df51489] raise exceptions.translate_fault(task_info.error) [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Faults: ['InvalidArgument'] [ 1643.193017] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] [ 1643.194297] env[61855]: INFO nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Terminating instance [ 1643.194931] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.195225] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.195453] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b600bb44-e9ab-40b5-b0f9-4c56d6bb41ff {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.197508] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1643.197708] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1643.198493] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970f4403-f8b6-4e0d-8117-4916a3dfc9ed {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.204884] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1643.205131] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecf43d03-a9a7-4fa8-9c7b-667490d88cf2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.207258] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.207434] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1643.208396] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ff0fd84-ef4a-429e-ac2b-44a13f1d21b1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.213182] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Waiting for the task: (returnval){ [ 1643.213182] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]525a5c6a-0eb0-ae15-4881-1a9b75644734" [ 1643.213182] env[61855]: _type = "Task" [ 1643.213182] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.226967] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1643.227204] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Creating directory with path [datastore2] vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.227412] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28c91cee-aaf2-4162-b336-c7bc69c6823b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.237305] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Created directory with path [datastore2] vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.237484] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Fetch image to [datastore2] vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1643.237658] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1643.238412] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2631c1-e416-438c-b191-b9a8d9e83b60 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.246105] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9bcc40-6fac-4be1-869d-115a32bcf8b7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.255043] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9904d34-2f8f-4eb9-a343-ec5b6e7338b0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.287012] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-72bf8dd0-cdd2-41b9-8a3d-dc44bd7e4922 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.289701] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1643.289933] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1643.290166] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Deleting the datastore file [datastore2] 6fbd649d-1fce-440f-9911-09b74df51489 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1643.290426] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e702ef60-49c3-40cf-914f-1438ba148970 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.295321] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cc1a7479-ce28-492c-8195-de76c399d43a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.298075] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Waiting for the task: (returnval){ [ 1643.298075] env[61855]: value = "task-4302945" [ 1643.298075] env[61855]: _type = "Task" [ 1643.298075] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.305902] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Task: {'id': task-4302945, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.315872] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1643.370455] env[61855]: DEBUG oslo_vmware.rw_handles [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1643.429253] env[61855]: DEBUG oslo_vmware.rw_handles [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1643.429442] env[61855]: DEBUG oslo_vmware.rw_handles [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1643.808189] env[61855]: DEBUG oslo_vmware.api [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Task: {'id': task-4302945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07027} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.808548] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1643.808592] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1643.808806] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1643.808966] env[61855]: INFO nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1643.811133] env[61855]: DEBUG nova.compute.claims [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1643.811316] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.811527] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.992020] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a05823-63d8-4e10-b36d-5ff704b15386 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.998966] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98594a8-1fb5-4266-9820-48c334d99f81 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.031089] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af57dd6-37d1-4482-a8cf-304abefcac68 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.038197] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbbcba3-62b0-4751-b61f-63082dc8f4a6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.050799] env[61855]: DEBUG nova.compute.provider_tree [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1644.058858] env[61855]: DEBUG nova.scheduler.client.report [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1644.073973] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.262s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.074508] env[61855]: ERROR nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1644.074508] env[61855]: Faults: ['InvalidArgument'] [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Traceback (most recent call last): [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self.driver.spawn(context, instance, image_meta, [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self._fetch_image_if_missing(context, vi) [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] image_cache(vi, tmp_image_ds_loc) [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] vm_util.copy_virtual_disk( [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] session._wait_for_task(vmdk_copy_task) [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] return self.wait_for_task(task_ref) [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] return evt.wait() [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] result = hub.switch() [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] return self.greenlet.switch() [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] self.f(*self.args, **self.kw) [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] raise exceptions.translate_fault(task_info.error) [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Faults: ['InvalidArgument'] [ 1644.074508] env[61855]: ERROR nova.compute.manager [instance: 6fbd649d-1fce-440f-9911-09b74df51489] [ 1644.075526] env[61855]: DEBUG nova.compute.utils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] VimFaultException {{(pid=61855) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1644.076550] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Build of instance 6fbd649d-1fce-440f-9911-09b74df51489 was re-scheduled: A specified parameter was not correct: fileType [ 1644.076550] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1644.076910] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1644.077095] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1644.077266] env[61855]: DEBUG nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1644.077429] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1644.499456] env[61855]: DEBUG nova.network.neutron [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.512395] env[61855]: INFO nova.compute.manager [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Took 0.43 seconds to deallocate network for instance. 
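[editorial sketch] The failure path traced above is the standard oslo.vmware task-polling pattern: wait_for_task() drives _poll_task until the CopyVirtualDisk_Task reaches a terminal state, and a task error is translated into a VimFaultException carrying the fault names (here ['InvalidArgument']). The following is a minimal sketch of that pattern under assumed placeholder credentials and datastore paths; it is not the Nova code itself.

    from oslo_vmware import api, exceptions

    # Placeholder endpoint and credentials; api_retry_count and
    # task_poll_interval are the knobs oslo.vmware uses while polling.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # Kick off a disk copy on the VirtualDiskManager, as vm_util does above.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
        destName='[datastore2] vmware_temp/example/example.vmdk')

    try:
        session.wait_for_task(task)  # loops _poll_task until SUCCESS or error
    except exceptions.VimFaultException as e:
        # For the failure logged above, e.fault_list == ['InvalidArgument']
        # and e.msg names the bad parameter ("fileType").
        print(e.fault_list, e.msg)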
[ 1644.621920] env[61855]: INFO nova.scheduler.client.report [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Deleted allocations for instance 6fbd649d-1fce-440f-9911-09b74df51489 [ 1644.646334] env[61855]: DEBUG oslo_concurrency.lockutils [None req-aade6262-be6c-46d6-8ec1-027dc2015115 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "6fbd649d-1fce-440f-9911-09b74df51489" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 619.892s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.647572] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "6fbd649d-1fce-440f-9911-09b74df51489" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 423.642s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.647804] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Acquiring lock "6fbd649d-1fce-440f-9911-09b74df51489-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.648022] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "6fbd649d-1fce-440f-9911-09b74df51489-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.648188] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "6fbd649d-1fce-440f-9911-09b74df51489-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.654020] env[61855]: INFO nova.compute.manager [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Terminating instance [ 1644.654020] env[61855]: DEBUG nova.compute.manager [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1644.654020] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1644.654020] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf17d15a-a628-485c-aaa6-59a902d8a758 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.663361] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8041925-072e-4449-889a-95c405ba6aa2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.674051] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1644.696575] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6fbd649d-1fce-440f-9911-09b74df51489 could not be found. [ 1644.696785] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1644.696964] env[61855]: INFO nova.compute.manager [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1644.697238] env[61855]: DEBUG oslo.service.loopingcall [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.697466] env[61855]: DEBUG nova.compute.manager [-] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1644.697559] env[61855]: DEBUG nova.network.neutron [-] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1644.721766] env[61855]: DEBUG nova.network.neutron [-] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.723680] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.723906] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.725361] env[61855]: INFO nova.compute.claims [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1644.728901] env[61855]: INFO nova.compute.manager [-] [instance: 6fbd649d-1fce-440f-9911-09b74df51489] Took 0.03 seconds to deallocate network for instance. 
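[editorial sketch] The instance_claim above succeeds against an unchanged inventory. Placement computes allocatable capacity per resource class as (total - reserved) * allocation_ratio, so the figures in the report-client records work out as below; this is a quick arithmetic check using the inventory exactly as logged, not Nova code.

    # Inventory as logged by nova.scheduler.client.report above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 210.0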
[ 1644.823252] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b943216a-324f-4de0-97b5-24f69d1f40e0 tempest-ServerPasswordTestJSON-1530707316 tempest-ServerPasswordTestJSON-1530707316-project-member] Lock "6fbd649d-1fce-440f-9911-09b74df51489" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.910269] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44555ed-1185-43f4-87c5-8b3f8de2745a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.917906] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a364abb9-6e09-4327-84b4-684e9aa7f436 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.947288] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68637f39-f979-48e5-9afe-6118161b5bfd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.954642] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2481b6e2-c9b4-4b3b-859f-7fb1e2cbffc6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.967197] env[61855]: DEBUG nova.compute.provider_tree [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1644.976558] env[61855]: DEBUG nova.scheduler.client.report [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1644.990616] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.267s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.991121] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1645.023212] env[61855]: DEBUG nova.compute.utils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1645.024492] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1645.024656] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1645.034257] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1645.096016] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1645.101151] env[61855]: DEBUG nova.policy [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5256e05619f744e988b78876f04b7286', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a6c123dab04b01868b291d2b953e75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1645.120671] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1645.120969] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1645.121184] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1645.121689] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1645.121827] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1645.122086] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1645.122246] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1645.122413] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1645.122581] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1645.122745] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1645.122920] env[61855]: DEBUG nova.virt.hardware [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1645.123807] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f921f7-80dc-49c5-9ae9-db16a59a2221 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.131880] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0a3570-cb38-44e0-a684-2005c0eb7d48 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.470351] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Successfully created port: 203d3832-858b-4428-80a1-8a4e340dc411 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1646.145526] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Successfully updated port: 203d3832-858b-4428-80a1-8a4e340dc411 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1646.156407] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "refresh_cache-feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.156548] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "refresh_cache-feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.156695] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1646.198952] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1646.548076] env[61855]: DEBUG nova.compute.manager [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Received event network-vif-plugged-203d3832-858b-4428-80a1-8a4e340dc411 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1646.548307] env[61855]: DEBUG oslo_concurrency.lockutils [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] Acquiring lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.548512] env[61855]: DEBUG oslo_concurrency.lockutils [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.548683] env[61855]: DEBUG oslo_concurrency.lockutils [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.548889] env[61855]: DEBUG nova.compute.manager [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] No waiting events found dispatching network-vif-plugged-203d3832-858b-4428-80a1-8a4e340dc411 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1646.549234] env[61855]: WARNING nova.compute.manager [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Received unexpected event network-vif-plugged-203d3832-858b-4428-80a1-8a4e340dc411 for instance with vm_state building and task_state spawning. [ 1646.549379] env[61855]: DEBUG nova.compute.manager [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Received event network-changed-203d3832-858b-4428-80a1-8a4e340dc411 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1646.549594] env[61855]: DEBUG nova.compute.manager [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Refreshing instance network info cache due to event network-changed-203d3832-858b-4428-80a1-8a4e340dc411. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1646.549713] env[61855]: DEBUG oslo_concurrency.lockutils [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] Acquiring lock "refresh_cache-feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.628778] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Updating instance_info_cache with network_info: [{"id": "203d3832-858b-4428-80a1-8a4e340dc411", "address": "fa:16:3e:31:1f:79", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap203d3832-85", "ovs_interfaceid": "203d3832-858b-4428-80a1-8a4e340dc411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.643271] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "refresh_cache-feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.643570] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Instance network_info: |[{"id": "203d3832-858b-4428-80a1-8a4e340dc411", "address": "fa:16:3e:31:1f:79", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap203d3832-85", 
"ovs_interfaceid": "203d3832-858b-4428-80a1-8a4e340dc411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1646.643921] env[61855]: DEBUG oslo_concurrency.lockutils [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] Acquired lock "refresh_cache-feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.644147] env[61855]: DEBUG nova.network.neutron [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Refreshing network info cache for port 203d3832-858b-4428-80a1-8a4e340dc411 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1646.645243] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:1f:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f925dc8-2145-457e-a4d4-c07117356dd0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '203d3832-858b-4428-80a1-8a4e340dc411', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1646.653093] env[61855]: DEBUG oslo.service.loopingcall [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1646.655940] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1646.656765] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8dc40afc-db7c-476e-bb01-7e531ac3c671 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.677635] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1646.677635] env[61855]: value = "task-4302946" [ 1646.677635] env[61855]: _type = "Task" [ 1646.677635] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.685598] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302946, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.004211] env[61855]: DEBUG nova.network.neutron [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Updated VIF entry in instance network info cache for port 203d3832-858b-4428-80a1-8a4e340dc411. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1647.004582] env[61855]: DEBUG nova.network.neutron [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Updating instance_info_cache with network_info: [{"id": "203d3832-858b-4428-80a1-8a4e340dc411", "address": "fa:16:3e:31:1f:79", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap203d3832-85", "ovs_interfaceid": "203d3832-858b-4428-80a1-8a4e340dc411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.014035] env[61855]: DEBUG oslo_concurrency.lockutils [req-1e595102-2f52-471e-aa1e-9f3071f5bb71 req-f7fba1ff-6c7e-40b0-ae07-071f2e8644e9 service nova] Releasing lock "refresh_cache-feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.187479] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302946, 'name': CreateVM_Task, 'duration_secs': 0.292496} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.187832] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1647.188273] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.188439] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.188755] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1647.189033] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4769c9e1-b958-4914-b7eb-9ae97bdb9bf8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.193712] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1647.193712] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52ae7260-6c0f-4adf-3d5c-2543134c105b" [ 1647.193712] env[61855]: _type = "Task" [ 1647.193712] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.201483] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52ae7260-6c0f-4adf-3d5c-2543134c105b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.705587] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.705946] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1647.706098] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.597338] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "e49e5769-b561-48e0-9009-21e71844238f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.597939] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "e49e5769-b561-48e0-9009-21e71844238f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.622858] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "c63748b0-5d55-447f-a11d-f0704edc1e86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.623095] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.052147] env[61855]: DEBUG oslo_concurrency.lockutils [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.924358] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.919751] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.924903] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.924903] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1673.925663] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1673.925993] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1673.925993] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1673.947736] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.947928] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948027] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948158] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948298] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948417] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948539] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948658] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948777] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.948895] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1673.949027] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1673.949509] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1674.924182] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.923603] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.923940] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.924016] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.935887] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.936154] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.936367] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.936565] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1675.937699] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5493a683-076a-4f83-9392-f3c929ae3450 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.946676] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9676a8-479e-4d2e-81e5-365c7d80ecf8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.960215] env[61855]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a558c5bc-4f40-4cc1-a42d-67428b5b9a94 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.966299] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8474a4ff-c08b-450f-aa4b-3c435b0e1005 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.996242] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180681MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1675.996386] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.996626] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.069851] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070019] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070151] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070275] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070397] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070516] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070633] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070749] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070864] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.070977] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1676.085959] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1676.095899] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1676.125096] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1676.125332] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1676.125479] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1676.266319] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dc376f-cd98-462d-8cfc-9b7c02f6186b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.273479] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da9df87-875c-45ba-afbd-49de97b94548 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.302594] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3930daac-d0ce-4601-af00-60ae35a49450 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.309206] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701ffa75-614b-4032-b5fb-22a29ba91b40 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.323681] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1676.331976] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1676.345739] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1676.345922] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.349s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.971523] env[61855]: WARNING oslo_vmware.rw_handles [None 
req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1689.971523] env[61855]: ERROR oslo_vmware.rw_handles [ 1689.972248] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1689.973793] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1689.974065] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Copying Virtual Disk [datastore2] vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/686bc0be-92fb-4cbf-bce8-b836b0606533/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1689.974337] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2875a490-5819-45c1-b0e7-df117b8be02d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.982348] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Waiting for the task: (returnval){ [ 1689.982348] env[61855]: value = "task-4302947" [ 1689.982348] env[61855]: _type = "Task" [ 1689.982348] 
env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.990194] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Task: {'id': task-4302947, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.492716] env[61855]: DEBUG oslo_vmware.exceptions [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1690.492967] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.493524] env[61855]: ERROR nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1690.493524] env[61855]: Faults: ['InvalidArgument'] [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Traceback (most recent call last): [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] yield resources [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self.driver.spawn(context, instance, image_meta, [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self._fetch_image_if_missing(context, vi) [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] image_cache(vi, tmp_image_ds_loc) [ 1690.493524] env[61855]: ERROR 
nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] vm_util.copy_virtual_disk( [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] session._wait_for_task(vmdk_copy_task) [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] return self.wait_for_task(task_ref) [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] return evt.wait() [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] result = hub.switch() [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] return self.greenlet.switch() [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self.f(*self.args, **self.kw) [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] raise exceptions.translate_fault(task_info.error) [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Faults: ['InvalidArgument'] [ 1690.493524] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] [ 1690.494524] env[61855]: INFO nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Terminating instance [ 1690.495383] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.495662] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1690.496097] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.496255] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquired lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.496421] env[61855]: DEBUG nova.network.neutron [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1690.497324] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a687a388-85c1-4c1c-9487-6aa0274147f0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.506877] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1690.507148] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1690.508126] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f58351ce-c36f-43f5-88b0-5f95b0eabaf2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.513285] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Waiting for the task: (returnval){ [ 1690.513285] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c63582-3767-f91d-861a-d47265405d17" [ 1690.513285] env[61855]: _type = "Task" [ 1690.513285] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.521008] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c63582-3767-f91d-861a-d47265405d17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.558603] env[61855]: DEBUG nova.network.neutron [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1690.630461] env[61855]: DEBUG nova.network.neutron [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.641795] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Releasing lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.642225] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1690.642420] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1690.643535] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ce8948-468b-48eb-9cc4-67e5541eb538 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.651133] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1690.651358] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4e87ca5-ede1-4ac8-8d01-3c22bfd8c949 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.684941] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1690.685158] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1690.685341] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Deleting the datastore file [datastore2] a155c5fa-7988-4e53-a26f-7fbccdeb43b4 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1690.685582] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86e6e962-4633-4117-812f-09564397d675 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.690970] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Waiting for the task: (returnval){ [ 1690.690970] env[61855]: value = "task-4302949" [ 1690.690970] env[61855]: _type = "Task" [ 1690.690970] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.698205] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Task: {'id': task-4302949, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.023218] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1691.023602] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Creating directory with path [datastore2] vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1691.023725] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7b06351-05c7-442f-b933-7f0e0ab9bd95 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.034775] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Created directory with path [datastore2] vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1691.034990] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Fetch image to [datastore2] vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1691.035138] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1691.035831] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f083b962-7a18-4e23-b57b-10789cd8050a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.042233] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aee9c23-4d32-4106-9653-105f8567b44b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.052177] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ac982a-a935-48cb-b9e3-4c5c225ddb50 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.082181] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bf3aed-d9de-4ad7-b4a4-fd9627de1c0a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.088097] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3cc762bc-3c51-4da5-9e99-32d477a31136 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.109485] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1691.161226] env[61855]: DEBUG oslo_vmware.rw_handles [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1691.219691] env[61855]: DEBUG oslo_vmware.rw_handles [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1691.219873] env[61855]: DEBUG oslo_vmware.rw_handles [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1691.224156] env[61855]: DEBUG oslo_vmware.api [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Task: {'id': task-4302949, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034636} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.224416] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1691.224610] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1691.224806] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1691.225010] env[61855]: INFO nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Took 0.58 seconds to destroy the instance on the hypervisor. [ 1691.225281] env[61855]: DEBUG oslo.service.loopingcall [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1691.225515] env[61855]: DEBUG nova.compute.manager [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1691.227899] env[61855]: DEBUG nova.compute.claims [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1691.228097] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.228338] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.429474] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623cff67-4dbe-4013-8f8b-b7bc7599ac38 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.437014] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd87fc38-1870-4b16-b604-4e307293a3d0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.467057] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ac2060-340b-4cb8-a258-a0409855ef7c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.474050] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dd85e7-f958-4e55-b8ef-e49325411c57 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.486731] env[61855]: DEBUG nova.compute.provider_tree [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1691.499699] env[61855]: DEBUG nova.scheduler.client.report [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1691.516221] env[61855]: DEBUG 
oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.287s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.516221] env[61855]: ERROR nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1691.516221] env[61855]: Faults: ['InvalidArgument'] [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Traceback (most recent call last): [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self.driver.spawn(context, instance, image_meta, [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self._fetch_image_if_missing(context, vi) [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] image_cache(vi, tmp_image_ds_loc) [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] vm_util.copy_virtual_disk( [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] session._wait_for_task(vmdk_copy_task) [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] return self.wait_for_task(task_ref) [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1691.516221] env[61855]: ERROR nova.compute.manager 
[instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] return evt.wait() [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] result = hub.switch() [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] return self.greenlet.switch() [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] self.f(*self.args, **self.kw) [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] raise exceptions.translate_fault(task_info.error) [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Faults: ['InvalidArgument'] [ 1691.516221] env[61855]: ERROR nova.compute.manager [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] [ 1691.517336] env[61855]: DEBUG nova.compute.utils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1691.518144] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Build of instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 was re-scheduled: A specified parameter was not correct: fileType [ 1691.518144] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1691.518517] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1691.518740] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.518885] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquired lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.519063] env[61855]: DEBUG nova.network.neutron [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1691.551590] env[61855]: DEBUG nova.network.neutron [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1691.629872] env[61855]: DEBUG nova.network.neutron [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.638875] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Releasing lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.639099] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1691.639284] env[61855]: DEBUG nova.compute.manager [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1691.734123] env[61855]: INFO nova.scheduler.client.report [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Deleted allocations for instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 [ 1691.753256] env[61855]: DEBUG oslo_concurrency.lockutils [None req-67a83c12-fb7c-4ffa-b377-f2f9af9091a2 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 529.488s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.754453] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 333.072s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.754682] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.754906] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.755105] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.756991] env[61855]: INFO nova.compute.manager [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Terminating instance [ 1691.758495] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquiring lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.758656] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 
tempest-ServerDiagnosticsV248Test-897721723-project-member] Acquired lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.758825] env[61855]: DEBUG nova.network.neutron [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1691.768178] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1691.785781] env[61855]: DEBUG nova.network.neutron [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1691.825914] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.826198] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.827663] env[61855]: INFO nova.compute.claims [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1691.858300] env[61855]: DEBUG nova.network.neutron [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.867478] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Releasing lock "refresh_cache-a155c5fa-7988-4e53-a26f-7fbccdeb43b4" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.867899] env[61855]: DEBUG nova.compute.manager [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Start destroying the 
instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1691.868104] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1691.868602] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d61131b1-9587-44ac-84b5-a75536e0169a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.878511] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b066a232-181a-4298-bf9a-4cb228f91bb5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.913300] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a155c5fa-7988-4e53-a26f-7fbccdeb43b4 could not be found. [ 1691.913538] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1691.913740] env[61855]: INFO nova.compute.manager [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1691.913981] env[61855]: DEBUG oslo.service.loopingcall [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1691.916380] env[61855]: DEBUG nova.compute.manager [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1691.916500] env[61855]: DEBUG nova.network.neutron [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1691.935558] env[61855]: DEBUG nova.network.neutron [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1691.945133] env[61855]: DEBUG nova.network.neutron [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.957768] env[61855]: INFO nova.compute.manager [-] [instance: a155c5fa-7988-4e53-a26f-7fbccdeb43b4] Took 0.04 seconds to deallocate network for instance. [ 1692.026271] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597bd2f4-09cf-44bf-890b-7c9fca4af1a8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.033673] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42efbe6e-4523-4949-b24c-aaeb27724225 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.065233] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1fbcee-9c47-49be-b37b-abd11604987f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.067770] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bbf41701-246a-4b30-9970-4d62941faa79 tempest-ServerDiagnosticsV248Test-897721723 tempest-ServerDiagnosticsV248Test-897721723-project-member] Lock "a155c5fa-7988-4e53-a26f-7fbccdeb43b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.313s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.073467] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2076f31d-8331-4f53-ae71-a07a15a06782 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.086898] env[61855]: DEBUG nova.compute.provider_tree [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.094530] env[61855]: DEBUG nova.scheduler.client.report [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1692.107497] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61855) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.107936] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1692.140526] env[61855]: DEBUG nova.compute.utils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1692.141799] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1692.141973] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1692.154597] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1692.215769] env[61855]: DEBUG nova.policy [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6365552b80dc401d8a166f179d231b10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ec8ab8fc8404ec8a37780aa6e6fd40e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1692.218842] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1692.246193] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1692.246444] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1692.246606] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1692.246793] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1692.246942] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1692.247103] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1692.247311] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1692.247474] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1692.247643] env[61855]: DEBUG nova.virt.hardware [None 
req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1692.247812] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1692.247993] env[61855]: DEBUG nova.virt.hardware [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1692.248881] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983d43b4-037e-4572-b3d5-e9ec87765793 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.257281] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51532e5-77c7-42ab-8330-d47164f704cc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.604059] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Successfully created port: 01bbcb32-03b1-44e0-8a2a-9da7367f4bb6 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1693.333891] env[61855]: DEBUG nova.compute.manager [req-863538c6-94b8-4683-ade2-af34a554fa43 req-4f4fa1d0-4c9d-4f3d-9215-f465cc7cc525 service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Received event network-vif-plugged-01bbcb32-03b1-44e0-8a2a-9da7367f4bb6 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1693.334271] env[61855]: DEBUG oslo_concurrency.lockutils [req-863538c6-94b8-4683-ade2-af34a554fa43 req-4f4fa1d0-4c9d-4f3d-9215-f465cc7cc525 service nova] Acquiring lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.334315] env[61855]: DEBUG oslo_concurrency.lockutils [req-863538c6-94b8-4683-ade2-af34a554fa43 req-4f4fa1d0-4c9d-4f3d-9215-f465cc7cc525 service nova] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.334482] env[61855]: DEBUG oslo_concurrency.lockutils [req-863538c6-94b8-4683-ade2-af34a554fa43 req-4f4fa1d0-4c9d-4f3d-9215-f465cc7cc525 service nova] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.334968] env[61855]: DEBUG nova.compute.manager 
[req-863538c6-94b8-4683-ade2-af34a554fa43 req-4f4fa1d0-4c9d-4f3d-9215-f465cc7cc525 service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] No waiting events found dispatching network-vif-plugged-01bbcb32-03b1-44e0-8a2a-9da7367f4bb6 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1693.334968] env[61855]: WARNING nova.compute.manager [req-863538c6-94b8-4683-ade2-af34a554fa43 req-4f4fa1d0-4c9d-4f3d-9215-f465cc7cc525 service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Received unexpected event network-vif-plugged-01bbcb32-03b1-44e0-8a2a-9da7367f4bb6 for instance with vm_state building and task_state spawning. [ 1693.480362] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Successfully updated port: 01bbcb32-03b1-44e0-8a2a-9da7367f4bb6 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1693.494062] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "refresh_cache-ba293405-d3ea-4a1d-b21d-c44bff58dcb6" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.494649] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "refresh_cache-ba293405-d3ea-4a1d-b21d-c44bff58dcb6" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.494649] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1693.550306] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1693.718120] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Updating instance_info_cache with network_info: [{"id": "01bbcb32-03b1-44e0-8a2a-9da7367f4bb6", "address": "fa:16:3e:4e:45:a7", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01bbcb32-03", "ovs_interfaceid": "01bbcb32-03b1-44e0-8a2a-9da7367f4bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.734588] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "refresh_cache-ba293405-d3ea-4a1d-b21d-c44bff58dcb6" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.734867] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Instance network_info: |[{"id": "01bbcb32-03b1-44e0-8a2a-9da7367f4bb6", "address": "fa:16:3e:4e:45:a7", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01bbcb32-03", "ovs_interfaceid": "01bbcb32-03b1-44e0-8a2a-9da7367f4bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1693.735281] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:45:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01bbcb32-03b1-44e0-8a2a-9da7367f4bb6', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1693.742962] env[61855]: DEBUG oslo.service.loopingcall [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1693.743417] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1693.743640] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9020606d-1aca-4f28-b608-1a5644774948 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.763669] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1693.763669] env[61855]: value = "task-4302950" [ 1693.763669] env[61855]: _type = "Task" [ 1693.763669] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.771038] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302950, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.274473] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302950, 'name': CreateVM_Task, 'duration_secs': 0.301556} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.274646] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1694.275334] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.275501] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.275815] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1694.276068] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6bba2eb-e27d-4b1b-a6cd-a93a01162b29 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.279983] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 1694.279983] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]528245a5-5bf3-df10-64df-1b40056685ea" [ 1694.279983] env[61855]: _type = "Task" [ 1694.279983] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.286908] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]528245a5-5bf3-df10-64df-1b40056685ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.790506] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.790826] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1694.790933] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.367306] env[61855]: DEBUG nova.compute.manager [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Received event network-changed-01bbcb32-03b1-44e0-8a2a-9da7367f4bb6 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1695.367399] env[61855]: DEBUG nova.compute.manager [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Refreshing instance network info cache due to event network-changed-01bbcb32-03b1-44e0-8a2a-9da7367f4bb6. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1695.367613] env[61855]: DEBUG oslo_concurrency.lockutils [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] Acquiring lock "refresh_cache-ba293405-d3ea-4a1d-b21d-c44bff58dcb6" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.367756] env[61855]: DEBUG oslo_concurrency.lockutils [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] Acquired lock "refresh_cache-ba293405-d3ea-4a1d-b21d-c44bff58dcb6" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.367916] env[61855]: DEBUG nova.network.neutron [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Refreshing network info cache for port 01bbcb32-03b1-44e0-8a2a-9da7367f4bb6 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1695.637475] env[61855]: DEBUG nova.network.neutron [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Updated VIF entry in instance network info cache for port 01bbcb32-03b1-44e0-8a2a-9da7367f4bb6. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1695.637841] env[61855]: DEBUG nova.network.neutron [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Updating instance_info_cache with network_info: [{"id": "01bbcb32-03b1-44e0-8a2a-9da7367f4bb6", "address": "fa:16:3e:4e:45:a7", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01bbcb32-03", "ovs_interfaceid": "01bbcb32-03b1-44e0-8a2a-9da7367f4bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.646970] env[61855]: DEBUG oslo_concurrency.lockutils [req-e2fafe0e-6d11-433d-ab14-0b383b437254 req-d40d5327-0d96-4b33-a2e9-c6bf2caf8efc service nova] Releasing lock "refresh_cache-ba293405-d3ea-4a1d-b21d-c44bff58dcb6" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.926142] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.926497] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61855) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1731.926069] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1731.926069] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1731.948673] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1732.932318] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running 
periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1733.923908] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1733.924174] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1733.924324] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1733.924495] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1733.924621] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1733.934786] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] There are 0 instances to clean {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1734.935644] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1734.936026] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1734.936026] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1734.955321] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.955484] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.955616] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.955742] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.955866] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.955986] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.956162] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.956293] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.956414] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.956534] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1734.956656] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1735.924609] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.924896] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.925023] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.937060] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.937452] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.937452] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.937600] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1735.938788] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cdb3ce-59f3-4a42-9f14-cf0f420b2230 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.947449] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0579b7f9-dc7b-4346-828c-9903119b3154 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.960943] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42842569-f7cb-4cc2-adf7-b994fd91a3a8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.967016] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb24ffe-fc98-4db5-8dd5-227144b1fd54 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.996497] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180676MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1735.996675] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.996837] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.159697] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.159952] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance cf66f181-60e6-43d4-a561-a32e9174448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.160165] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.160362] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.160550] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.160733] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.160930] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.161133] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.161317] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.161495] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1736.175301] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1736.186016] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1736.186243] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1736.186392] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1736.330537] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d9b623-5f00-440b-b93e-5c74d3cdf43f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.338306] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b5e905-88ce-4455-9281-1dac1e11ba6e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.370117] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde6085b-1a9f-497c-99c2-c49a7f01d367 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.377088] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b71d1d1-fc70-4522-b447-08da32644fb6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.389723] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1736.398373] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1736.412525] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1736.412700] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.416s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.942540] env[61855]: WARNING oslo_vmware.rw_handles [None 
req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1737.942540] env[61855]: ERROR oslo_vmware.rw_handles [ 1737.943462] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1737.944879] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1737.945137] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Copying Virtual Disk [datastore2] vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/2e8cd775-88ca-4b5a-9177-3df952c0c252/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1737.945420] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df7365ba-20e7-4d1a-9f6b-5f3a5a00ba01 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.953337] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Waiting for the task: (returnval){ [ 1737.953337] env[61855]: value = "task-4302951" [ 
1737.953337] env[61855]: _type = "Task" [ 1737.953337] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.961300] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Task: {'id': task-4302951, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.412460] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1738.462958] env[61855]: DEBUG oslo_vmware.exceptions [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1738.463258] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.463828] env[61855]: ERROR nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1738.463828] env[61855]: Faults: ['InvalidArgument'] [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Traceback (most recent call last): [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] yield resources [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] self.driver.spawn(context, instance, image_meta, [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] 
self._fetch_image_if_missing(context, vi) [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] image_cache(vi, tmp_image_ds_loc) [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] vm_util.copy_virtual_disk( [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] session._wait_for_task(vmdk_copy_task) [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] return self.wait_for_task(task_ref) [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] return evt.wait() [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] result = hub.switch() [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] return self.greenlet.switch() [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] self.f(*self.args, **self.kw) [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] raise exceptions.translate_fault(task_info.error) [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Faults: ['InvalidArgument'] [ 1738.463828] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] [ 1738.464939] env[61855]: INFO nova.compute.manager [None 
req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Terminating instance [ 1738.465679] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.465886] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1738.466386] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-336c80bc-77a2-4ff6-b0d9-ba23bf88eeef {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.468441] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1738.468632] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1738.469353] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a51c6c-8528-4765-8e80-88957172a9e3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.475999] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1738.476235] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d296e754-b3f6-47c3-956a-1bfc410d1493 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.478257] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1738.478431] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1738.479343] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a646f832-e8ea-4cdd-87e5-a6901ab65bbe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.484354] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1738.484354] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52753269-759c-6cdd-1472-270a495adb4a" [ 1738.484354] env[61855]: _type = "Task" [ 1738.484354] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.492939] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52753269-759c-6cdd-1472-270a495adb4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.550175] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1738.550969] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1738.550969] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Deleting the datastore file [datastore2] accbccfe-3858-4a4c-b47b-3f12976c8c20 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1738.550969] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0f84ea4-9800-44fc-b34a-4feab6314280 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.557184] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Waiting for the task: (returnval){ [ 1738.557184] env[61855]: value = "task-4302953" [ 1738.557184] env[61855]: _type = "Task" [ 1738.557184] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.564790] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Task: {'id': task-4302953, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.995990] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1738.996374] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1738.996510] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-993231b6-8634-40f7-be37-39da820ac765 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.008525] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.008796] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Fetch image to [datastore2] vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1739.009060] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1739.009938] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cba45e-0df0-4db6-8ed8-16cc0318cde7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.016848] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975af855-ebc8-4123-96be-399279821569 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.025551] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e868c94-dd2c-4bd9-85a2-b41e29b12aca {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.056384] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269b0c73-f9a2-4b57-ac3b-b4eb76252bf7 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.066790] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b3cc924f-0658-4368-b82b-6e7c7a89078f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.068364] env[61855]: DEBUG oslo_vmware.api [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Task: {'id': task-4302953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066419} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.068605] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1739.068794] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1739.068967] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1739.069155] env[61855]: INFO nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Took 0.60 seconds to destroy the instance on the hypervisor. 
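The DeleteDatastoreFile_Task records above follow oslo.vmware's generic wait-for-task shape: submit the task, then re-read its state on a fixed interval until it reports success or error, logging progress on each poll. A minimal stdlib-only sketch of that polling loop, assuming a caller-supplied fetch_task_info callable and a hypothetical TaskFailed exception (neither is the real oslo.vmware API):

    import time

    class TaskFailed(Exception):
        """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        # fetch_task_info() is assumed to return a dict such as
        # {'state': 'running', 'progress': 0} read back from the server.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info                          # task completed successfully
            if info['state'] == 'error':
                raise TaskFailed(info.get('error'))  # cf. translate_fault above
            # each pass through here corresponds to one "progress is N%." record
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)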
[ 1739.071286] env[61855]: DEBUG nova.compute.claims [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1739.071503] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.071717] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.089659] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1739.146987] env[61855]: DEBUG oslo_vmware.rw_handles [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1739.204980] env[61855]: DEBUG oslo_vmware.rw_handles [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1739.205187] env[61855]: DEBUG oslo_vmware.rw_handles [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1739.312511] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309b4951-2118-4608-bfd2-f6bce0b3c80d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.320564] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644d381d-597a-41f0-bdc2-531b407ab978 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.351408] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b67aa16-f639-47f9-8ad5-f1da9df66b0e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.358474] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59edda34-d2bf-427e-b220-a9e9a1aae168 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.371385] env[61855]: DEBUG nova.compute.provider_tree [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1739.380056] env[61855]: DEBUG nova.scheduler.client.report [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1739.393494] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.394010] env[61855]: ERROR nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1739.394010] env[61855]: Faults: ['InvalidArgument'] [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Traceback (most recent call last): [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/compute/manager.py", line 2633, 
in _build_and_run_instance [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] self.driver.spawn(context, instance, image_meta, [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] self._fetch_image_if_missing(context, vi) [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] image_cache(vi, tmp_image_ds_loc) [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] vm_util.copy_virtual_disk( [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] session._wait_for_task(vmdk_copy_task) [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] return self.wait_for_task(task_ref) [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] return evt.wait() [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] result = hub.switch() [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] return self.greenlet.switch() [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] self.f(*self.args, **self.kw) [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: 
accbccfe-3858-4a4c-b47b-3f12976c8c20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] raise exceptions.translate_fault(task_info.error) [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Faults: ['InvalidArgument'] [ 1739.394010] env[61855]: ERROR nova.compute.manager [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] [ 1739.395012] env[61855]: DEBUG nova.compute.utils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1739.396130] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Build of instance accbccfe-3858-4a4c-b47b-3f12976c8c20 was re-scheduled: A specified parameter was not correct: fileType [ 1739.396130] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1739.396528] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1739.396723] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1739.396900] env[61855]: DEBUG nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1739.397079] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1739.788171] env[61855]: DEBUG nova.network.neutron [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.801166] env[61855]: INFO nova.compute.manager [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Took 0.40 seconds to deallocate network for instance. [ 1739.896187] env[61855]: INFO nova.scheduler.client.report [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Deleted allocations for instance accbccfe-3858-4a4c-b47b-3f12976c8c20 [ 1739.917171] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bdca363d-26a5-4c8b-bbfb-e14ef3f50352 tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "accbccfe-3858-4a4c-b47b-3f12976c8c20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 550.094s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.918307] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "accbccfe-3858-4a4c-b47b-3f12976c8c20" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 353.928s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.918524] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Acquiring lock "accbccfe-3858-4a4c-b47b-3f12976c8c20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.918734] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock 
"accbccfe-3858-4a4c-b47b-3f12976c8c20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.918926] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "accbccfe-3858-4a4c-b47b-3f12976c8c20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.921022] env[61855]: INFO nova.compute.manager [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Terminating instance [ 1739.922802] env[61855]: DEBUG nova.compute.manager [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1739.923044] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1739.923605] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff2918ef-fe1c-4cd8-b395-26f8fdeae747 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.933939] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2224bce9-570c-47e9-b2f7-7836c572df6b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.944398] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1739.964999] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance accbccfe-3858-4a4c-b47b-3f12976c8c20 could not be found. 
[ 1739.966026] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1739.966026] env[61855]: INFO nova.compute.manager [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1739.966026] env[61855]: DEBUG oslo.service.loopingcall [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.966026] env[61855]: DEBUG nova.compute.manager [-] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1739.966026] env[61855]: DEBUG nova.network.neutron [-] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1739.999845] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.000145] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.001595] env[61855]: INFO nova.compute.claims [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1740.004718] env[61855]: DEBUG nova.network.neutron [-] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.015603] env[61855]: INFO nova.compute.manager [-] [instance: accbccfe-3858-4a4c-b47b-3f12976c8c20] Took 0.05 seconds to deallocate network for instance. 
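The lock lines throughout this section ("acquired ... :: waited Ns", "released ... :: held Ns") come from oslo.concurrency's lockutils wrapper, which times both the wait for the lock and how long it was held; that is how the 550.094s hold and 353.928s wait above were measured. A stdlib-only sketch that reproduces the same bookkeeping (the timed_lock helper is hypothetical, not the lockutils API):

    import threading
    import time

    def timed_lock(lock, name, holder):
        # Mimics the oslo_concurrency.lockutils messages seen above:
        # "acquired ... :: waited Ns" and "released ... :: held Ns".
        class _Ctx:
            def __enter__(self):
                start = time.monotonic()
                lock.acquire()
                self.acquired = time.monotonic()
                print(f'Lock "{name}" acquired by "{holder}" :: waited '
                      f"{self.acquired - start:.3f}s")
                return self
            def __exit__(self, *exc):
                held = time.monotonic() - self.acquired
                lock.release()
                print(f'Lock "{name}" "released" by "{holder}" :: held '
                      f"{held:.3f}s")
        return _Ctx()

    compute_resources = threading.Lock()
    with timed_lock(compute_resources, "compute_resources",
                    "ResourceTracker.instance_claim"):
        time.sleep(0.01)  # the resource claim happens while the lock is held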
[ 1740.105498] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9a5062aa-bb55-4463-bdc4-706509f55ead tempest-ServerRescueTestJSONUnderV235-1022238645 tempest-ServerRescueTestJSONUnderV235-1022238645-project-member] Lock "accbccfe-3858-4a4c-b47b-3f12976c8c20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.191254] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136d6d22-d146-401b-8da6-7c3fe872078d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.198934] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb01facd-a2a8-4a35-9952-8ad9f2bc2ba9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.228603] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffa83f0-32f3-49d4-aee7-22a67ab1e6f1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.235887] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6ebc36-c9f8-4539-9285-4262478da722 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.249194] env[61855]: DEBUG nova.compute.provider_tree [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.259173] env[61855]: DEBUG nova.scheduler.client.report [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1740.273042] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.273s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.273529] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1740.307712] env[61855]: DEBUG nova.compute.utils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1740.309624] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1740.309892] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1740.318932] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1740.386703] env[61855]: DEBUG nova.policy [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5c2a17d1334b36a2495d9b9bda5783', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c13499e461e24fc6964aa428afe66651', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1740.400285] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1740.426562] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1740.426817] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1740.426978] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1740.427177] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1740.427329] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1740.427476] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1740.427799] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1740.428009] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1740.428194] env[61855]: DEBUG 
nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1740.428365] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1740.428540] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1740.429444] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbd0aaa-c012-4b28-9c9f-1f59cf1f3fe4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.437883] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6105ccb2-c171-4005-8ff6-108ef9cb4699 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.952451] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Successfully created port: 9c3d513b-f06d-467b-b341-fad8c2e38a9b {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1741.603468] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Successfully updated port: 9c3d513b-f06d-467b-b341-fad8c2e38a9b {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1741.613669] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "refresh_cache-e49e5769-b561-48e0-9009-21e71844238f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.613839] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "refresh_cache-e49e5769-b561-48e0-9009-21e71844238f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.614017] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1741.661291] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 
tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1741.823776] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Updating instance_info_cache with network_info: [{"id": "9c3d513b-f06d-467b-b341-fad8c2e38a9b", "address": "fa:16:3e:5a:b7:6d", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c3d513b-f0", "ovs_interfaceid": "9c3d513b-f06d-467b-b341-fad8c2e38a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.826759] env[61855]: DEBUG nova.compute.manager [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] Received event network-vif-plugged-9c3d513b-f06d-467b-b341-fad8c2e38a9b {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1741.826973] env[61855]: DEBUG oslo_concurrency.lockutils [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] Acquiring lock "e49e5769-b561-48e0-9009-21e71844238f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.827198] env[61855]: DEBUG oslo_concurrency.lockutils [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] Lock "e49e5769-b561-48e0-9009-21e71844238f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.827367] env[61855]: DEBUG oslo_concurrency.lockutils [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] Lock "e49e5769-b561-48e0-9009-21e71844238f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.827535] env[61855]: DEBUG nova.compute.manager [req-b39e5995-0e88-466f-bc27-6c31eed46371 
req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] No waiting events found dispatching network-vif-plugged-9c3d513b-f06d-467b-b341-fad8c2e38a9b {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1741.827699] env[61855]: WARNING nova.compute.manager [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] Received unexpected event network-vif-plugged-9c3d513b-f06d-467b-b341-fad8c2e38a9b for instance with vm_state building and task_state spawning. [ 1741.827860] env[61855]: DEBUG nova.compute.manager [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] Received event network-changed-9c3d513b-f06d-467b-b341-fad8c2e38a9b {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1741.828026] env[61855]: DEBUG nova.compute.manager [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] Refreshing instance network info cache due to event network-changed-9c3d513b-f06d-467b-b341-fad8c2e38a9b. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1741.828198] env[61855]: DEBUG oslo_concurrency.lockutils [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] Acquiring lock "refresh_cache-e49e5769-b561-48e0-9009-21e71844238f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.836773] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "refresh_cache-e49e5769-b561-48e0-9009-21e71844238f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.837049] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Instance network_info: |[{"id": "9c3d513b-f06d-467b-b341-fad8c2e38a9b", "address": "fa:16:3e:5a:b7:6d", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c3d513b-f0", "ovs_interfaceid": "9c3d513b-f06d-467b-b341-fad8c2e38a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1741.837320] env[61855]: DEBUG oslo_concurrency.lockutils [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] Acquired lock "refresh_cache-e49e5769-b561-48e0-9009-21e71844238f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.837499] env[61855]: DEBUG nova.network.neutron [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] Refreshing network info cache for port 9c3d513b-f06d-467b-b341-fad8c2e38a9b {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1741.838535] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:b7:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1d468f87-964a-4fb6-bab3-b83f6f2646b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c3d513b-f06d-467b-b341-fad8c2e38a9b', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1741.846326] env[61855]: DEBUG oslo.service.loopingcall [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1741.849198] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e49e5769-b561-48e0-9009-21e71844238f] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1741.849638] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6901fa4c-b016-422b-a635-5467b1371f67 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.870365] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1741.870365] env[61855]: value = "task-4302954" [ 1741.870365] env[61855]: _type = "Task" [ 1741.870365] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.877541] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302954, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.166685] env[61855]: DEBUG nova.network.neutron [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] Updated VIF entry in instance network info cache for port 9c3d513b-f06d-467b-b341-fad8c2e38a9b. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1742.167069] env[61855]: DEBUG nova.network.neutron [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] [instance: e49e5769-b561-48e0-9009-21e71844238f] Updating instance_info_cache with network_info: [{"id": "9c3d513b-f06d-467b-b341-fad8c2e38a9b", "address": "fa:16:3e:5a:b7:6d", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c3d513b-f0", "ovs_interfaceid": "9c3d513b-f06d-467b-b341-fad8c2e38a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.179963] env[61855]: DEBUG oslo_concurrency.lockutils [req-b39e5995-0e88-466f-bc27-6c31eed46371 req-380e9b91-a103-4f15-9797-8c51c74a8812 service nova] Releasing lock "refresh_cache-e49e5769-b561-48e0-9009-21e71844238f" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.380468] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302954, 'name': CreateVM_Task, 'duration_secs': 0.290547} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.380634] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e49e5769-b561-48e0-9009-21e71844238f] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1742.381317] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.381489] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.381823] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1742.382096] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dc05c78-6c97-4c1a-b3af-d6aaebe29f59 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.386136] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 1742.386136] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5276d713-41a5-0c48-1b84-9e4bd8e53d6f" [ 1742.386136] env[61855]: _type = "Task" [ 1742.386136] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.394671] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5276d713-41a5-0c48-1b84-9e4bd8e53d6f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.897070] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.897424] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1742.897424] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.239740] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1759.263456] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Getting list of instances from cluster (obj){ [ 1759.263456] env[61855]: value = "domain-c8" [ 1759.263456] env[61855]: _type = "ClusterComputeResource" [ 1759.263456] env[61855]: } {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1759.264781] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11e20a3-dee0-4663-b505-d45139ea8ae8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.281557] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Got total of 10 instances {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1759.281817] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid cf66f181-60e6-43d4-a561-a32e9174448d {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.282042] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 242e1a24-3f5b-4509-8677-e5a4c7883605 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.282215] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 0ccec194-ef9c-42b7-8fd3-0baa3b012842 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.282374] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid a1cac88d-1c85-4f4a-9527-1be4dc7dba21 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.282557] 
env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid db4efbf1-db2e-404b-90fb-57c6a56bf7c7 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.282716] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 8653615e-3254-436e-984d-e52fdfb86ce4 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.282867] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid d7d51668-c93b-4db2-ab7c-10345258fbc7 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.283023] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.283175] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid ba293405-d3ea-4a1d-b21d-c44bff58dcb6 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.283321] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid e49e5769-b561-48e0-9009-21e71844238f {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1759.283689] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "cf66f181-60e6-43d4-a561-a32e9174448d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.283913] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "242e1a24-3f5b-4509-8677-e5a4c7883605" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.284131] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.284339] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.284542] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.284739] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock 
"8653615e-3254-436e-984d-e52fdfb86ce4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.284935] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.285146] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.285340] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.285531] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "e49e5769-b561-48e0-9009-21e71844238f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.201205] env[61855]: DEBUG oslo_concurrency.lockutils [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.828189] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.719762] env[61855]: WARNING oslo_vmware.rw_handles [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles 
File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1787.719762] env[61855]: ERROR oslo_vmware.rw_handles [ 1787.720454] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1787.722393] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1787.722671] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Copying Virtual Disk [datastore2] vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/44bbe2f0-26bc-43c6-aa4c-4dcd50e66276/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1787.723021] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80db4b7b-f011-4cd1-a66f-ec10317821a7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.730637] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1787.730637] env[61855]: value = "task-4302955" [ 1787.730637] env[61855]: _type = "Task" [ 1787.730637] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.738452] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302955, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.241661] env[61855]: DEBUG oslo_vmware.exceptions [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1788.241925] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.242474] env[61855]: ERROR nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1788.242474] env[61855]: Faults: ['InvalidArgument'] [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Traceback (most recent call last): [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] yield resources [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] self.driver.spawn(context, instance, image_meta, [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] self._fetch_image_if_missing(context, vi) [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] image_cache(vi, tmp_image_ds_loc) [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] vm_util.copy_virtual_disk( [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] session._wait_for_task(vmdk_copy_task) [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] return self.wait_for_task(task_ref) [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] return evt.wait() [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] result = hub.switch() [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] return self.greenlet.switch() [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] self.f(*self.args, **self.kw) [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] raise exceptions.translate_fault(task_info.error) [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Faults: ['InvalidArgument'] [ 1788.242474] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] [ 1788.244496] env[61855]: INFO nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Terminating instance [ 1788.244496] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.244679] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1788.244846] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8aaa6687-1f9d-4e12-80fa-c17daca6b9cd {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.246915] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1788.247124] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1788.247831] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c6fca3-f21b-4930-b10b-4984951b02eb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.256134] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1788.256362] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d24dc30-5e26-473e-a4b8-628d4637e125 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.258432] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1788.258607] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1788.259605] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9effdf43-442f-49d4-80ca-0bf44fc666ec {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.264296] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 1788.264296] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52ec5a18-0612-4584-bacb-7ab340405ec9" [ 1788.264296] env[61855]: _type = "Task" [ 1788.264296] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.271757] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52ec5a18-0612-4584-bacb-7ab340405ec9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.324062] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1788.324298] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1788.324545] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleting the datastore file [datastore2] cf66f181-60e6-43d4-a561-a32e9174448d {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1788.324749] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ead4a9d5-3d92-4343-b6fd-d620ae26a181 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.331464] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1788.331464] env[61855]: value = "task-4302957" [ 1788.331464] env[61855]: _type = "Task" [ 1788.331464] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.339207] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302957, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.614016] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "333ca086-f585-4325-9ba8-fbcdfc6650f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.614274] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.774395] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1788.774746] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating directory with path [datastore2] vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1788.774889] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73389ca0-8d3e-4595-bfa7-6592c80c7bb9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.786669] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created directory with path [datastore2] vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1788.786868] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Fetch image to [datastore2] vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1788.787034] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1788.787862] env[61855]: DEBUG
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d67e392-6c5d-4e36-8396-12780d85893e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.794585] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4947dd-d774-4696-a89d-09d3efa3fb62 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.803309] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98ff0e5-debe-4007-9737-2f00796ab5b3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.835451] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b304c2cb-0034-431f-9a76-4f133282b14f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.844282] env[61855]: DEBUG oslo_vmware.api [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094637} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.844765] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1788.844948] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1788.845135] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1788.845307] env[61855]: INFO nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Took 0.60 seconds to destroy the instance on the hypervisor. 
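[annotation] The DeleteDatastoreFile_Task records above ("Waiting for the task ... progress is 0% ... completed successfully ... duration_secs") are produced by oslo.vmware's task-polling loop. A minimal sketch of that pattern, assuming only that poll_task() returns a vSphere TaskInfo-like object with state, progress and error fields (the real loop lives in oslo_vmware.api and runs on a looping call, not time.sleep):

    import time

    def wait_for_task(poll_task, interval=0.5):
        # Poll a vCenter task until it leaves the transient states.
        # 'queued'/'running'/'success'/'error' are the vim.TaskInfo states.
        while True:
            info = poll_task()
            if info.state in ('queued', 'running'):
                # corresponds to the "... progress is 0%" records above
                time.sleep(interval)
            elif info.state == 'success':
                return info  # logged together with its duration_secs
            else:
                # an 'error' state is what surfaces later in this log as a
                # VimFaultException
                raise RuntimeError(info.error)
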
[ 1788.846802] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a6185a89-c5bb-40f9-926a-a384cab5dac4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.848627] env[61855]: DEBUG nova.compute.claims [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1788.848799] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.849016] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.870479] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1788.924063] env[61855]: DEBUG oslo_vmware.rw_handles [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1788.982062] env[61855]: DEBUG oslo_vmware.rw_handles [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1788.982272] env[61855]: DEBUG oslo_vmware.rw_handles [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1789.084882] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f985d310-e0b7-4128-870b-d06427328faa {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.092490] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb810c3-1f3c-4e19-adf7-a36589eec56b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.122151] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65facde-6d9c-4aaa-a9c5-f98582093239 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.128995] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1375d3-7332-4a3a-908c-316de90ccf3d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.141945] env[61855]: DEBUG nova.compute.provider_tree [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1789.150615] env[61855]: DEBUG nova.scheduler.client.report [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1789.168263] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.319s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.168793] env[61855]: ERROR nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1789.168793] env[61855]: Faults: ['InvalidArgument'] [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Traceback (most recent call last): [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: 
cf66f181-60e6-43d4-a561-a32e9174448d] self.driver.spawn(context, instance, image_meta, [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] self._fetch_image_if_missing(context, vi) [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] image_cache(vi, tmp_image_ds_loc) [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] vm_util.copy_virtual_disk( [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] session._wait_for_task(vmdk_copy_task) [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] return self.wait_for_task(task_ref) [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] return evt.wait() [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] result = hub.switch() [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] return self.greenlet.switch() [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] self.f(*self.args, **self.kw) [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] raise exceptions.translate_fault(task_info.error) [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Faults: ['InvalidArgument'] [ 1789.168793] env[61855]: ERROR nova.compute.manager [instance: cf66f181-60e6-43d4-a561-a32e9174448d] [ 1789.169632] env[61855]: DEBUG nova.compute.utils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1789.170869] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Build of instance cf66f181-60e6-43d4-a561-a32e9174448d was re-scheduled: A specified parameter was not correct: fileType [ 1789.170869] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1789.171577] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1789.171772] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1789.171948] env[61855]: DEBUG nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1789.172133] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1789.504908] env[61855]: DEBUG nova.network.neutron [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.515499] env[61855]: INFO nova.compute.manager [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Took 0.34 seconds to deallocate network for instance. [ 1789.619396] env[61855]: INFO nova.scheduler.client.report [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted allocations for instance cf66f181-60e6-43d4-a561-a32e9174448d [ 1789.645203] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6f72a8de-9819-4935-ab2d-3b667d192d63 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "cf66f181-60e6-43d4-a561-a32e9174448d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 598.864s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.646708] env[61855]: DEBUG oslo_concurrency.lockutils [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "cf66f181-60e6-43d4-a561-a32e9174448d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 403.108s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.646939] env[61855]: DEBUG oslo_concurrency.lockutils [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "cf66f181-60e6-43d4-a561-a32e9174448d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.647169] env[61855]: DEBUG oslo_concurrency.lockutils [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "cf66f181-60e6-43d4-a561-a32e9174448d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [
1789.647332] env[61855]: DEBUG oslo_concurrency.lockutils [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "cf66f181-60e6-43d4-a561-a32e9174448d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.649281] env[61855]: INFO nova.compute.manager [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Terminating instance [ 1789.651375] env[61855]: DEBUG nova.compute.manager [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1789.651567] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1789.652077] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fcff277f-4c52-484f-bec9-b9423f84dfad {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.656825] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1789.663369] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc6afeb-3e90-4d1d-a780-2c304e633781 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.693962] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cf66f181-60e6-43d4-a561-a32e9174448d could not be found. [ 1789.694198] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1789.694380] env[61855]: INFO nova.compute.manager [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Took 0.04 seconds to destroy the instance on the hypervisor.
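[annotation] Note how this second terminate of cf66f181-60e6-43d4-a561-a32e9174448d proceeds even though the VM was already unregistered after the failed build: the driver logs "Instance does not exist on backend", swallows InstanceNotFound, and still reports "Instance destroyed". A simplified sketch of that idempotent-destroy shape (names below are illustrative, not Nova's actual signatures):

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(backend, uuid):
        try:
            ref = backend.find_by_uuid(uuid)  # cf. SearchIndex.FindAllByUuid above
            backend.unregister(ref)
            backend.delete_files(ref)
        except InstanceNotFound:
            # Nothing left on the hypervisor; deleting twice must not fail,
            # so the destroy is treated as already complete.
            pass
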
[ 1789.694626] env[61855]: DEBUG oslo.service.loopingcall [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.696875] env[61855]: DEBUG nova.compute.manager [-] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1789.696982] env[61855]: DEBUG nova.network.neutron [-] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1789.711031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.711297] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.712783] env[61855]: INFO nova.compute.claims [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1789.723888] env[61855]: DEBUG nova.network.neutron [-] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.741545] env[61855]: INFO nova.compute.manager [-] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] Took 0.04 seconds to deallocate network for instance. [ 1789.836492] env[61855]: DEBUG oslo_concurrency.lockutils [None req-cdf8f18c-e5d3-44c0-9c32-60715feeec08 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "cf66f181-60e6-43d4-a561-a32e9174448d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.190s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.837342] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "cf66f181-60e6-43d4-a561-a32e9174448d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 30.554s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.837535] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: cf66f181-60e6-43d4-a561-a32e9174448d] During sync_power_state the instance has a pending task (deleting). Skip.
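[annotation] Every Acquiring/acquired/released triplet in these records, including the "waited"/"held" timings, is emitted by oslo.concurrency's lock wrapper rather than by Nova itself. A minimal sketch of the same pattern, using lock names mirroring the ones in this log; both forms get the DEBUG instrumentation for free:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Runs with "compute_resources" held; a concurrent
        # abort_instance_claim serializes behind it, which is why the log
        # shows non-zero "waited" times under contention.
        ...

    # Equivalent context-manager form, e.g. for the per-instance UUID locks:
    def terminate(uuid):
        with lockutils.lock(uuid):
            ...
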
[ 1789.837709] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "cf66f181-60e6-43d4-a561-a32e9174448d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.895735] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5d408a-77d0-4151-ad16-8f1366ea311f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.903624] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9282afc9-19cd-4b44-97a9-06abf51d1ee8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.934872] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2e95f5-4876-4b8b-8678-785cd77faa8f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.942248] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc4e43d-7ef2-46cb-addb-85616c3b972f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.955257] env[61855]: DEBUG nova.compute.provider_tree [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1789.963302] env[61855]: DEBUG nova.scheduler.client.report [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1789.976878] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.266s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.977366] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Start building networks asynchronously for instance.
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1790.010057] env[61855]: DEBUG nova.compute.utils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1790.011905] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1790.011905] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1790.021225] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1790.067014] env[61855]: DEBUG nova.policy [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5c2a17d1334b36a2495d9b9bda5783', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c13499e461e24fc6964aa428afe66651', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1790.087237] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1790.113611] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.113851] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.114151] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.114364] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.114516] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.114666] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.114876] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.115050] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.115230] env[61855]: DEBUG
nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.115394] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.115568] env[61855]: DEBUG nova.virt.hardware [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.116469] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325dfe7c-79d5-4c74-9915-156dbb08aac3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.124563] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd04fb45-0557-49bc-b44a-eed117bed1e7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.599123] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Successfully created port: 174bc0d9-7669-491b-a2df-e3d9eafaaba0 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1791.312778] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Successfully updated port: 174bc0d9-7669-491b-a2df-e3d9eafaaba0 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1791.324398] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "refresh_cache-c63748b0-5d55-447f-a11d-f0704edc1e86" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.324571] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "refresh_cache-c63748b0-5d55-447f-a11d-f0704edc1e86" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.324762] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1791.368250] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 
tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1791.546391] env[61855]: DEBUG nova.compute.manager [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Received event network-vif-plugged-174bc0d9-7669-491b-a2df-e3d9eafaaba0 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1791.546617] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] Acquiring lock "c63748b0-5d55-447f-a11d-f0704edc1e86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.546825] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.546993] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.547183] env[61855]: DEBUG nova.compute.manager [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] No waiting events found dispatching network-vif-plugged-174bc0d9-7669-491b-a2df-e3d9eafaaba0 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1791.547348] env[61855]: WARNING nova.compute.manager [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Received unexpected event network-vif-plugged-174bc0d9-7669-491b-a2df-e3d9eafaaba0 for instance with vm_state building and task_state spawning. [ 1791.547509] env[61855]: DEBUG nova.compute.manager [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Received event network-changed-174bc0d9-7669-491b-a2df-e3d9eafaaba0 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1791.547668] env[61855]: DEBUG nova.compute.manager [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Refreshing instance network info cache due to event network-changed-174bc0d9-7669-491b-a2df-e3d9eafaaba0.
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1791.547834] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] Acquiring lock "refresh_cache-c63748b0-5d55-447f-a11d-f0704edc1e86" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.613857] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Updating instance_info_cache with network_info: [{"id": "174bc0d9-7669-491b-a2df-e3d9eafaaba0", "address": "fa:16:3e:86:30:82", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap174bc0d9-76", "ovs_interfaceid": "174bc0d9-7669-491b-a2df-e3d9eafaaba0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.626905] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "refresh_cache-c63748b0-5d55-447f-a11d-f0704edc1e86" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.627221] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Instance network_info: |[{"id": "174bc0d9-7669-491b-a2df-e3d9eafaaba0", "address": "fa:16:3e:86:30:82", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap174bc0d9-76", "ovs_interfaceid": "174bc0d9-7669-491b-a2df-e3d9eafaaba0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1791.627526] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] Acquired lock "refresh_cache-c63748b0-5d55-447f-a11d-f0704edc1e86" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.627701] env[61855]: DEBUG nova.network.neutron [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Refreshing network info cache for port 174bc0d9-7669-491b-a2df-e3d9eafaaba0 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1791.628781] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:30:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1d468f87-964a-4fb6-bab3-b83f6f2646b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '174bc0d9-7669-491b-a2df-e3d9eafaaba0', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1791.637937] env[61855]: DEBUG oslo.service.loopingcall [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.638883] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1791.641228] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46a13176-b24b-460b-bd7b-c9f401af62c2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.663136] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1791.663136] env[61855]: value = "task-4302958" [ 1791.663136] env[61855]: _type = "Task" [ 1791.663136] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.671459] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302958, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.929591] env[61855]: DEBUG nova.network.neutron [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Updated VIF entry in instance network info cache for port 174bc0d9-7669-491b-a2df-e3d9eafaaba0. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1791.929947] env[61855]: DEBUG nova.network.neutron [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Updating instance_info_cache with network_info: [{"id": "174bc0d9-7669-491b-a2df-e3d9eafaaba0", "address": "fa:16:3e:86:30:82", "network": {"id": "686d5571-cecf-4270-92df-5a9ffd4d0ec4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1912386450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c13499e461e24fc6964aa428afe66651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap174bc0d9-76", "ovs_interfaceid": "174bc0d9-7669-491b-a2df-e3d9eafaaba0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.940066] env[61855]: DEBUG oslo_concurrency.lockutils [req-2a7b4dd7-0e37-443f-b977-bf6bd6266976 req-c1c854d3-0b1a-4432-a23d-abd6f8ada1fd service nova] Releasing lock "refresh_cache-c63748b0-5d55-447f-a11d-f0704edc1e86" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.964476] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.172816] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302958, 'name': CreateVM_Task, 'duration_secs': 0.268612} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.173090] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1792.173666] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.173834] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.174321] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.174585] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cae38625-57ba-47a4-a7de-e6410484224b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.179013] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 1792.179013] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c8339c-ac05-2d7f-5b89-b138f86e0d71" [ 1792.179013] env[61855]: _type = "Task" [ 1792.179013] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.186475] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52c8339c-ac05-2d7f-5b89-b138f86e0d71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.689260] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.689559] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1792.689734] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.923782] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1793.924512] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1793.924880] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1793.924930] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1795.924404] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.924794] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1795.924794] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1795.946766] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.946927] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947057] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947188] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947313] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947433] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947552] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947670] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947801] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.947918] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1795.948047] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1795.948528] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.924211] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.923702] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.935573] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.935805] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.935974] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.936156] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1797.937287] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cfb207-be95-4cba-9d59-d0027b3320a1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.946075] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032b86b2-6525-4498-b94b-a45af3928954 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.959698] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6dea2a-cac2-4470-aae7-de0f5594f792 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.966229] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65feb37-02f7-419f-b8a6-cb79445ab80b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.996284] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180654MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1797.996398] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.996577] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.064940] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065142] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065245] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065369] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065490] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065606] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065725] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065869] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.065950] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.066079] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1798.077208] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1798.077415] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1798.077559] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1798.093428] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing inventories for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1798.107762] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating ProviderTree inventory for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1798.107932] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1798.117915] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing aggregate associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, aggregates: None {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1798.134867] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing trait associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1798.249350] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5d53f9-c49e-4b8d-b948-a4ee887ba2b1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.256468] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a89b240b-aafe-4851-97dd-3e36451f0c74 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.285312] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db7b2b4-1558-4012-a37c-451663ffef93 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.291876] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9c1244-6564-4648-9355-1f123bcb2bbe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.304159] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1798.312104] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1798.346035] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1798.346035] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.349s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.346767] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.203817] env[61855]: WARNING oslo_vmware.rw_handles [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1837.203817] env[61855]: ERROR oslo_vmware.rw_handles [ 1837.204467] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1837.206448] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1837.206710] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Copying Virtual Disk [datastore2] vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/73032a28-a5b9-4ef9-9fe5-128432b7b0f5/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1837.207030] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ef81b72-6ca7-45f0-bf3f-d16e8cd5f5a6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.214544] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 1837.214544] env[61855]: value = "task-4302959" [ 1837.214544] env[61855]: _type = "Task" [ 1837.214544] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.222526] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': task-4302959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.725364] env[61855]: DEBUG oslo_vmware.exceptions [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1837.725662] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.726234] env[61855]: ERROR nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1837.726234] env[61855]: Faults: ['InvalidArgument'] [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Traceback (most recent call last): [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] yield resources [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self.driver.spawn(context, instance, image_meta, [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self._fetch_image_if_missing(context, vi) [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] image_cache(vi, tmp_image_ds_loc) [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] vm_util.copy_virtual_disk( [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] session._wait_for_task(vmdk_copy_task) [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] return self.wait_for_task(task_ref) [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] return evt.wait() [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] result = hub.switch() [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] return self.greenlet.switch() [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self.f(*self.args, **self.kw) [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] raise exceptions.translate_fault(task_info.error) [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Faults: ['InvalidArgument'] [ 1837.726234] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] [ 1837.727265] env[61855]: INFO nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Terminating instance [ 1837.728370] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.728550] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1837.728794] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ea5ce34-d555-4842-94dd-8bd9c7e55cf0 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.731223] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1837.731418] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1837.732158] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b1d26d-9c3d-4959-aa24-dc785f82b7b4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.738798] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1837.738995] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-743751ec-b644-426b-a842-d44ee8175c32 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.741059] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1837.741244] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1837.742223] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d7081e9-0257-4d5b-9bf7-136093d45233 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.747846] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Waiting for the task: (returnval){ [ 1837.747846] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527e624e-4ffa-b769-d9fc-ef5b7ad2aeb4" [ 1837.747846] env[61855]: _type = "Task" [ 1837.747846] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.755399] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527e624e-4ffa-b769-d9fc-ef5b7ad2aeb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.812197] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1837.812417] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1837.812601] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Deleting the datastore file [datastore2] 242e1a24-3f5b-4509-8677-e5a4c7883605 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1837.812860] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92cc43a0-f831-432a-8a01-b7434ad9101a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.819145] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 1837.819145] env[61855]: value = "task-4302961" [ 1837.819145] env[61855]: _type = "Task" [ 1837.819145] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.826432] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': task-4302961, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.258572] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1838.258900] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Creating directory with path [datastore2] vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1838.259041] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c62155f3-de19-4678-9739-e7648e5a6881 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.271543] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Created directory with path [datastore2] vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1838.271724] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Fetch image to [datastore2] vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1838.271897] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1838.272636] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce0433c-bce6-4e5b-945f-c5d4ebb08da0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.278951] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31981585-c941-430a-aa92-c4e1b9d64bb7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.287593] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122e24f5-d806-423f-9949-1f91614b75ed {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.318486] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-168e617a-66e9-484c-868e-d63e53c15c33 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.329303] env[61855]: DEBUG oslo_vmware.api [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': task-4302961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063937} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.329504] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-711bac2d-5032-4050-a8fb-f7df20c6343a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.331100] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1838.331351] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1838.331509] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1838.331696] env[61855]: INFO nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Took 0.60 seconds to destroy the instance on the hypervisor. 
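[editor's note] The entries above show the task-polling pattern that recurs throughout this log: a vCenter task (SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is started, polled with "progress is N%", and finally reported "completed successfully" with a duration, or translated into a fault. Below is a minimal, self-contained sketch of that loop. It only mimics the behaviour of oslo_vmware.api's wait_for_task/_poll_task; TaskInfo, the fetch_task_info callable, and the poll interval are illustrative stand-ins, not the real oslo.vmware API.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:                      # stand-in for the vSphere TaskInfo object
        state: str                       # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state."""
        start = time.monotonic()
        while True:
            info = fetch_task_info()     # one server round-trip per iteration in the real code
            if info.state == 'success':
                print(f"completed successfully in {time.monotonic() - start:.6f}s")
                return info
            if info.state == 'error':
                # here oslo_vmware translates the server fault into a
                # VimFaultException (see the tracebacks elsewhere in this log)
                raise RuntimeError(f"task failed: {info.error}")
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)

In the real driver the loop runs inside a looping call on an eventlet hub, which is why the tracebacks above pass through eventlet/hubs/hub.py before reaching _poll_task.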
[ 1838.333778] env[61855]: DEBUG nova.compute.claims [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1838.333948] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.334183] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.353010] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1838.519579] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1838.574329] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce62471d-60ac-4626-afcf-4cddad23452e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.578904] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1838.579097] env[61855]: DEBUG oslo_vmware.rw_handles [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1838.582744] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b13208-e2b3-4277-b55b-6e1a880f1870 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.612604] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5867fcb-db67-4426-a53c-2de14f9a4ed4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.619260] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2650528d-c255-40e7-9248-b8db4d7342d2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.632824] env[61855]: DEBUG nova.compute.provider_tree [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.642915] env[61855]: DEBUG nova.scheduler.client.report [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1838.657263] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.323s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.657789] env[61855]: ERROR nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1838.657789] env[61855]: Faults: ['InvalidArgument'] [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Traceback (most recent call last): [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self.driver.spawn(context, instance, image_meta, [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, 
in spawn [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self._fetch_image_if_missing(context, vi) [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] image_cache(vi, tmp_image_ds_loc) [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] vm_util.copy_virtual_disk( [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] session._wait_for_task(vmdk_copy_task) [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] return self.wait_for_task(task_ref) [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] return evt.wait() [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] result = hub.switch() [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] return self.greenlet.switch() [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] self.f(*self.args, **self.kw) [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] raise exceptions.translate_fault(task_info.error) [ 1838.657789] env[61855]: ERROR nova.compute.manager 
[instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Faults: ['InvalidArgument'] [ 1838.657789] env[61855]: ERROR nova.compute.manager [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] [ 1838.658651] env[61855]: DEBUG nova.compute.utils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1838.659850] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Build of instance 242e1a24-3f5b-4509-8677-e5a4c7883605 was re-scheduled: A specified parameter was not correct: fileType [ 1838.659850] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1838.660276] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1838.660411] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1838.660580] env[61855]: DEBUG nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1838.660744] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1838.931115] env[61855]: DEBUG nova.network.neutron [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.945174] env[61855]: INFO nova.compute.manager [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Took 0.28 seconds to deallocate network for instance. 
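[editor's note] The traceback and the "was re-scheduled" entry above show the compute manager's failure path: driver.spawn raises a VimFaultException ('InvalidArgument' on fileType during CopyVirtualDisk), the resource claim is aborted, the network is deallocated, and the build is handed back for re-scheduling. The sketch below is a heavily simplified, hypothetical rendering of that control flow; the exception classes, spawn() signature, and cleanup helpers are stand-ins, not Nova's actual internals.

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list      # e.g. ['InvalidArgument']

    class RescheduledException(Exception):
        """Signals that the build should be retried on another host."""

    def build_and_run_instance(instance, spawn, abort_claim, deallocate_network):
        try:
            spawn(instance)                   # driver.spawn -> _fetch_image_if_missing -> copy task
        except VimFaultException as exc:
            abort_claim(instance)             # release the "compute_resources" claim
            deallocate_network(instance)      # "Deallocating network for instance"
            # a scheduler retry will place the instance on another host
            raise RescheduledException(
                f"build of {instance} re-scheduled: {exc}") from exc

Note that the VIF-unplug step is skipped in the log above because the VMware driver does not implement unplug_vifs, exactly as the "Virt driver does not provide unplug_vifs method" entry states.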
[ 1839.041028] env[61855]: INFO nova.scheduler.client.report [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Deleted allocations for instance 242e1a24-3f5b-4509-8677-e5a4c7883605 [ 1839.064591] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1b4ad829-249a-40cf-abb1-445dcb0fba6d tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 646.545s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.065789] env[61855]: DEBUG oslo_concurrency.lockutils [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 450.777s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.065978] env[61855]: DEBUG oslo_concurrency.lockutils [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "242e1a24-3f5b-4509-8677-e5a4c7883605-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.066151] env[61855]: DEBUG oslo_concurrency.lockutils [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.066326] env[61855]: DEBUG oslo_concurrency.lockutils [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.068280] env[61855]: INFO nova.compute.manager [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Terminating instance [ 1839.069985] env[61855]: DEBUG nova.compute.manager [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1839.070193] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1839.070669] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63bd7742-b899-42b9-abc1-655e2194f04e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.079569] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca3d0cf-2156-4b11-92a7-2788c46687cd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.090839] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1839.111576] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 242e1a24-3f5b-4509-8677-e5a4c7883605 could not be found. [ 1839.111799] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1839.112019] env[61855]: INFO nova.compute.manager [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1839.112260] env[61855]: DEBUG oslo.service.loopingcall [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1839.112496] env[61855]: DEBUG nova.compute.manager [-] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1839.112590] env[61855]: DEBUG nova.network.neutron [-] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1839.145383] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.145623] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.147095] env[61855]: INFO nova.compute.claims [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1839.150093] env[61855]: DEBUG nova.network.neutron [-] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.157391] env[61855]: INFO nova.compute.manager [-] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] Took 0.04 seconds to deallocate network for instance. [ 1839.251275] env[61855]: DEBUG oslo_concurrency.lockutils [None req-39dd94e1-9f83-4a2c-b5b8-fd06218720b9 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.252159] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 79.968s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.252370] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 242e1a24-3f5b-4509-8677-e5a4c7883605] During sync_power_state the instance has a pending task (deleting). Skip. 
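
The Acquiring / acquired / released triplets around instance 242e1a24 (the build lock held 646.545s, the terminate lock waited 450.777s) come from oslo_concurrency.lockutils, which logs how long each named lock was waited on and then held. A rough re-creation of that logging pattern with a plain threading.Lock; the LOG wording and the module-level lock registry here are approximations, not the lockutils implementation:

    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    _locks = {}
    _locks_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, caller):
        # One shared Lock object per lock name, as with named lockutils locks.
        with _locks_guard:
            lock = _locks.setdefault(name, threading.Lock())
        LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
        start = time.monotonic()
        with lock:
            waited = time.monotonic() - start
            LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                      name, caller, waited)
            held_start = time.monotonic()
            try:
                yield
            finally:
                # Logged on exit of the critical section, just before release.
                held = time.monotonic() - held_start
                LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                          name, caller, held)

The long wait on the terminate lock is expected here: do_terminate_instance queues on the same instance-UUID lock behind _locked_do_build_and_run_instance, so the delete can only proceed once the re-scheduled build releases it.
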
[ 1839.252767] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "242e1a24-3f5b-4509-8677-e5a4c7883605" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.329680] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe0920-3fe5-4310-997f-b16ba0322cc8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.339350] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce78e20a-c0ff-4d73-8515-3906f8618cd8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.369761] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511e12ea-04f9-4f67-b23e-7f8ef02a8087 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.377239] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea8431a-977e-4779-b390-fdc013c2953e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.390707] env[61855]: DEBUG nova.compute.provider_tree [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1839.401045] env[61855]: DEBUG nova.scheduler.client.report [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1839.414429] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.269s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.414974] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1839.447788] env[61855]: DEBUG nova.compute.utils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1839.449151] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1839.449342] env[61855]: DEBUG nova.network.neutron [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1839.457753] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1839.522227] env[61855]: DEBUG nova.policy [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24e7161122834e938ca6156e3f8c2855', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1eb57982dd094432baccff494449adad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1839.530211] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1839.555978] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1839.556252] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1839.556411] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1839.556595] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1839.556745] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1839.556895] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1839.557111] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1839.557283] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1839.557451] env[61855]: DEBUG 
nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1839.557614] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1839.557795] env[61855]: DEBUG nova.virt.hardware [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1839.558644] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c9f17e-39c2-452a-83bc-25eec44d1f87 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.566257] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ad3d8b-2508-4370-b5c7-87de697afcd2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.852068] env[61855]: DEBUG nova.network.neutron [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Successfully created port: 32c7997b-f36e-4c11-bcd1-de6d0ec52700 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1840.486552] env[61855]: DEBUG nova.network.neutron [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Successfully updated port: 32c7997b-f36e-4c11-bcd1-de6d0ec52700 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1840.498728] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "refresh_cache-333ca086-f585-4325-9ba8-fbcdfc6650f5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.498865] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "refresh_cache-333ca086-f585-4325-9ba8-fbcdfc6650f5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.499020] env[61855]: DEBUG nova.network.neutron [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1840.538205] env[61855]: DEBUG nova.network.neutron [None 
req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1840.713946] env[61855]: DEBUG nova.network.neutron [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Updating instance_info_cache with network_info: [{"id": "32c7997b-f36e-4c11-bcd1-de6d0ec52700", "address": "fa:16:3e:0a:06:56", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c7997b-f3", "ovs_interfaceid": "32c7997b-f36e-4c11-bcd1-de6d0ec52700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.727149] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "refresh_cache-333ca086-f585-4325-9ba8-fbcdfc6650f5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.727433] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Instance network_info: |[{"id": "32c7997b-f36e-4c11-bcd1-de6d0ec52700", "address": "fa:16:3e:0a:06:56", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c7997b-f3", "ovs_interfaceid": 
"32c7997b-f36e-4c11-bcd1-de6d0ec52700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1840.727846] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:06:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32c7997b-f36e-4c11-bcd1-de6d0ec52700', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1840.735417] env[61855]: DEBUG oslo.service.loopingcall [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.735862] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1840.736131] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64e017a2-40cc-4f08-aebf-d6ea9606ce5b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.756228] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1840.756228] env[61855]: value = "task-4302962" [ 1840.756228] env[61855]: _type = "Task" [ 1840.756228] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.763732] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302962, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.974841] env[61855]: DEBUG nova.compute.manager [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Received event network-vif-plugged-32c7997b-f36e-4c11-bcd1-de6d0ec52700 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1840.975043] env[61855]: DEBUG oslo_concurrency.lockutils [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] Acquiring lock "333ca086-f585-4325-9ba8-fbcdfc6650f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.975260] env[61855]: DEBUG oslo_concurrency.lockutils [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.975429] env[61855]: DEBUG oslo_concurrency.lockutils [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.975599] env[61855]: DEBUG nova.compute.manager [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] No waiting events found dispatching network-vif-plugged-32c7997b-f36e-4c11-bcd1-de6d0ec52700 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1840.975767] env[61855]: WARNING nova.compute.manager [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Received unexpected event network-vif-plugged-32c7997b-f36e-4c11-bcd1-de6d0ec52700 for instance with vm_state building and task_state spawning. [ 1840.975936] env[61855]: DEBUG nova.compute.manager [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Received event network-changed-32c7997b-f36e-4c11-bcd1-de6d0ec52700 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1840.976385] env[61855]: DEBUG nova.compute.manager [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Refreshing instance network info cache due to event network-changed-32c7997b-f36e-4c11-bcd1-de6d0ec52700. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1840.976600] env[61855]: DEBUG oslo_concurrency.lockutils [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] Acquiring lock "refresh_cache-333ca086-f585-4325-9ba8-fbcdfc6650f5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.976744] env[61855]: DEBUG oslo_concurrency.lockutils [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] Acquired lock "refresh_cache-333ca086-f585-4325-9ba8-fbcdfc6650f5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.976905] env[61855]: DEBUG nova.network.neutron [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Refreshing network info cache for port 32c7997b-f36e-4c11-bcd1-de6d0ec52700 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1841.266457] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302962, 'name': CreateVM_Task, 'duration_secs': 0.300113} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.266638] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1841.413971] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.414175] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.414498] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1841.414806] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e08b7d1-9b22-403a-a002-021df4c278e3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.419848] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 1841.419848] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5265aee2-cb5f-cd16-c0e2-f21aea1acd2e" [ 1841.419848] env[61855]: _type = "Task" [ 1841.419848] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.427884] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5265aee2-cb5f-cd16-c0e2-f21aea1acd2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.507916] env[61855]: DEBUG nova.network.neutron [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Updated VIF entry in instance network info cache for port 32c7997b-f36e-4c11-bcd1-de6d0ec52700. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1841.508346] env[61855]: DEBUG nova.network.neutron [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Updating instance_info_cache with network_info: [{"id": "32c7997b-f36e-4c11-bcd1-de6d0ec52700", "address": "fa:16:3e:0a:06:56", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c7997b-f3", "ovs_interfaceid": "32c7997b-f36e-4c11-bcd1-de6d0ec52700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.517706] env[61855]: DEBUG oslo_concurrency.lockutils [req-78a5b78a-a091-4455-b90a-3856b2849b02 req-d8d0acc2-ab54-4d0a-a685-8033021c335d service nova] Releasing lock "refresh_cache-333ca086-f585-4325-9ba8-fbcdfc6650f5" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.930313] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.930606] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1841.930822] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.919721] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.743458] env[61855]: DEBUG oslo_concurrency.lockutils [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "c63748b0-5d55-447f-a11d-f0704edc1e86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.820969] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "e49e5769-b561-48e0-9009-21e71844238f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.923722] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.923517] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.923851] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.924133] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1855.920248] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1855.943709] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1855.944076] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1855.944198] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1855.963160] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.963323] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.963454] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.963860] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.963860] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.963860] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.964044] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.964232] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.964414] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.964543] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1855.964666] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1856.924577] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.924108] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.924489] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.924547] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.936546] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.936767] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.936931] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.937109] env[61855]: DEBUG 
nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1858.938213] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174bd394-8a33-4251-9a36-c1e1907153b5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.946948] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa379645-7eb3-43fb-a78e-0f7b2bb8cd34 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.960915] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1540c8ff-4038-4c18-bc7d-19da05af4a2d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.967049] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb69700-bf4b-46ff-8e39-6e55e8c21236 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.996382] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180669MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1858.996519] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.996704] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.069710] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.069874] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.070014] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.070143] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.070264] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.070381] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.070499] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.070614] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.070731] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.071125] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1859.071125] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1859.071260] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1859.192147] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d2afbe-1811-4eee-9b96-f0d438d26a5b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.199621] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcfd221-2716-4efa-9d62-39694aff043d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.228150] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc859ba2-384a-4b92-b640-d2793ba871ef {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.234688] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6f1519-612a-4bd3-9a6f-d9a1f5009dcb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.247166] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.255189] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1859.269565] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1859.269745] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.273s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.298635] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.298982] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.752600] env[61855]: WARNING oslo_vmware.rw_handles [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1887.752600] env[61855]: ERROR oslo_vmware.rw_handles [ 1887.753483] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1887.755722] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1887.755968] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Copying
Virtual Disk [datastore2] vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/720c80d2-e4ed-4136-8f5f-aa23c97b459f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1887.756319] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dff4aa3-4cc4-4dcf-b56f-71345d911a63 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.766436] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Waiting for the task: (returnval){ [ 1887.766436] env[61855]: value = "task-4302963" [ 1887.766436] env[61855]: _type = "Task" [ 1887.766436] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.775016] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Task: {'id': task-4302963, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.277230] env[61855]: DEBUG oslo_vmware.exceptions [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1888.277520] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.278082] env[61855]: ERROR nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1888.278082] env[61855]: Faults: ['InvalidArgument'] [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Traceback (most recent call last): [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] yield resources [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self.driver.spawn(context, instance, image_meta, [ 1888.278082] env[61855]: ERROR 
nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self._fetch_image_if_missing(context, vi) [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] image_cache(vi, tmp_image_ds_loc) [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] vm_util.copy_virtual_disk( [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] session._wait_for_task(vmdk_copy_task) [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] return self.wait_for_task(task_ref) [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] return evt.wait() [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] result = hub.switch() [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] return self.greenlet.switch() [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self.f(*self.args, **self.kw) [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 
0ccec194-ef9c-42b7-8fd3-0baa3b012842] raise exceptions.translate_fault(task_info.error) [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Faults: ['InvalidArgument'] [ 1888.278082] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] [ 1888.279091] env[61855]: INFO nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Terminating instance [ 1888.281008] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.281008] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1888.281008] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c977ca7-632b-4c04-9e48-6c66c6a404a6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.282662] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Start destroying the instance on the hypervisor. 
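The records around this point trace the VMware destroy path: unregister the VM from the vCenter inventory (VirtualMachine.UnregisterVM), then delete its datastore directory (FileManager.DeleteDatastoreFile_Task) and wait on the returned task. A minimal sketch of that sequence; `session`, `invoke_api`, and the argument shapes are simplified stand-ins for illustration, not the exact Nova/oslo.vmware signatures:

def destroy_on_hypervisor(session, vm_ref, ds_path, datacenter_ref):
    # Step 1: remove the VM from the vCenter inventory; its files stay
    # on the datastore (the "Unregistering the VM" record).
    session.invoke_api("UnregisterVM", vm_ref)
    # Step 2: delete the instance directory; this returns a Task
    # (task-4302965 in the records below) that must be polled.
    task = session.invoke_api("DeleteDatastoreFile_Task", ds_path, datacenter_ref)
    session.wait_for_task(task)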
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1888.282859] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1888.283587] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bfe3bb-4f24-4219-9fc2-f0c0f2ec5d1b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.290662] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1888.290890] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fc31b09-55f7-47a2-8bfb-ac6e02dd6b97 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.293014] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1888.293207] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1888.294128] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fec135d0-c9d8-4557-a2f0-c435cf574b6a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.298729] env[61855]: DEBUG oslo_vmware.api [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 1888.298729] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]520a5dd8-4a55-dd11-9029-5c4fcd70f3f4" [ 1888.298729] env[61855]: _type = "Task" [ 1888.298729] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.314017] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1888.314097] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating directory with path [datastore2] vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1888.314336] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4471d7ea-4c22-4067-a008-efb841c211ff {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.333946] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created directory with path [datastore2] vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1888.334162] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Fetch image to [datastore2] vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1888.334373] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1888.335147] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61ca284-8d55-4f30-9e3a-b44079a90a74 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.341750] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71005e0f-5c5f-4997-b31a-c4c243ec3061 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.350459] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e0d898-a366-42c5-9763-c144ac137443 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.382783] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-593bafd9-8ca7-4143-ac7e-3ec85c79f9cc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.385128] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1888.385331] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1888.385511] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Deleting the datastore file [datastore2] 0ccec194-ef9c-42b7-8fd3-0baa3b012842 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1888.385720] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffa49ec4-7c7d-4389-901a-9f8e6b63d084 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.390234] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d4a8c38a-7ffb-47b5-86f8-044045d36b9c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.392879] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Waiting for the task: (returnval){ [ 1888.392879] env[61855]: value = "task-4302965" [ 1888.392879] env[61855]: _type = "Task" [ 1888.392879] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.399974] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Task: {'id': task-4302965, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.410091] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1888.457952] env[61855]: DEBUG oslo_vmware.rw_handles [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1888.516245] env[61855]: DEBUG oslo_vmware.rw_handles [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1888.516425] env[61855]: DEBUG oslo_vmware.rw_handles [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1888.903131] env[61855]: DEBUG oslo_vmware.api [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Task: {'id': task-4302965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068871} completed successfully. 
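The "Closing write handle" step here is the same code path that produced the RemoteDisconnected traceback earlier at 1887.752600: rw_handles.close() calls getresponse() on its underlying http.client connection, so a server that drops the socket without replying raises at exactly that point. A stdlib-only illustration of the failure mode; the host and file path are placeholders:

import http.client

conn = http.client.HTTPSConnection("esx.example.test")  # placeholder host
conn.putrequest("PUT", "/folder/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2")
conn.putheader("Content-Length", "21318656")
conn.endheaders()
# ... stream the image bytes with conn.send(chunk) ...
try:
    conn.getresponse()  # the call that raised inside rw_handles.close()
except http.client.RemoteDisconnected:
    # oslo.vmware logs this as a WARNING and carries on; in the earlier
    # trace the upload itself had finished, and it was the subsequent
    # CopyVirtualDisk_Task that actually failed.
    pass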
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.903515] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1888.903575] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1888.903740] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1888.903910] env[61855]: INFO nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1888.906083] env[61855]: DEBUG nova.compute.claims [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1888.906260] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.906470] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.088500] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1074c5e3-68c9-432b-ae1c-33b70cc404e9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.095623] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2436487f-3779-4d2a-ba3e-d2748f305372 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.125757] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c5ea14-bf19-464f-8fe3-55ce64d38a96 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.133197] 
env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a637ba-9a8f-47db-88ae-2a7717dc397f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.146719] env[61855]: DEBUG nova.compute.provider_tree [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1889.155646] env[61855]: DEBUG nova.scheduler.client.report [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1889.171029] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.171497] env[61855]: ERROR nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1889.171497] env[61855]: Faults: ['InvalidArgument'] [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Traceback (most recent call last): [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self.driver.spawn(context, instance, image_meta, [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self._fetch_image_if_missing(context, vi) [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] image_cache(vi, tmp_image_ds_loc) [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] vm_util.copy_virtual_disk( [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] session._wait_for_task(vmdk_copy_task) [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] return self.wait_for_task(task_ref) [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] return evt.wait() [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] result = hub.switch() [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] return self.greenlet.switch() [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] self.f(*self.args, **self.kw) [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] raise exceptions.translate_fault(task_info.error) [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Faults: ['InvalidArgument'] [ 1889.171497] env[61855]: ERROR nova.compute.manager [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] [ 1889.172558] env[61855]: DEBUG nova.compute.utils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 
0ccec194-ef9c-42b7-8fd3-0baa3b012842] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1889.173579] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Build of instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 was re-scheduled: A specified parameter was not correct: fileType [ 1889.173579] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1889.173950] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1889.174144] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1889.174350] env[61855]: DEBUG nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1889.174530] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1889.544067] env[61855]: DEBUG nova.network.neutron [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.569977] env[61855]: INFO nova.compute.manager [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Took 0.40 seconds to deallocate network for instance.
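The "Deleted allocations for instance" record that follows is the scheduler report client clearing the failed build's placement allocations. At the REST level this is a single DELETE against the consumer UUID; a sketch using requests, where the endpoint and token are placeholders (Nova itself goes through a keystoneauth session, not raw requests):

import requests

PLACEMENT = "http://placement.example.test"  # placeholder endpoint
consumer = "0ccec194-ef9c-42b7-8fd3-0baa3b012842"
resp = requests.delete(
    f"{PLACEMENT}/allocations/{consumer}",
    headers={"X-Auth-Token": "<token>"},  # placeholder credential
)
# Placement answers 204 No Content when the allocations are removed.
assert resp.status_code == 204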
[ 1889.674017] env[61855]: INFO nova.scheduler.client.report [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Deleted allocations for instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 [ 1889.701037] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8cbf28d9-efb0-4b98-82ee-05d3fe9cbf79 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 680.116s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.702260] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 483.736s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.702480] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.702684] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.702851] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.705044] env[61855]: INFO nova.compute.manager [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Terminating instance [ 1889.708546] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquiring lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.708705] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870
tempest-ListServersNegativeTestJSON-1338885870-project-member] Acquired lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.708874] env[61855]: DEBUG nova.network.neutron [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1889.714853] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1889.771620] env[61855]: DEBUG nova.network.neutron [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1889.777482] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.777722] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.779272] env[61855]: INFO nova.compute.claims [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1889.878029] env[61855]: DEBUG nova.network.neutron [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.887067] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Releasing lock "refresh_cache-0ccec194-ef9c-42b7-8fd3-0baa3b012842" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.887480] env[61855]: DEBUG nova.compute.manager [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] 
[instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1889.887683] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1889.888212] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5385c048-3890-4e9d-b518-8df9d4350fa8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.899597] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6514abaa-99ea-4611-86fc-1bf686f6dcc6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.931481] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0ccec194-ef9c-42b7-8fd3-0baa3b012842 could not be found. [ 1889.931761] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1889.931805] env[61855]: INFO nova.compute.manager [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1889.932049] env[61855]: DEBUG oslo.service.loopingcall [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1889.932294] env[61855]: DEBUG nova.compute.manager [-] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1889.932391] env[61855]: DEBUG nova.network.neutron [-] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1889.949711] env[61855]: DEBUG nova.network.neutron [-] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Instance cache missing network info.
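deallocate_for_instance(), running here against an instance whose cache turns out to be empty, reduces at the Neutron API level to listing the ports bound to the instance's device_id and deleting each one; with "network_info: []" the loop has nothing to remove. A sketch with placeholder endpoint and token:

import requests

NEUTRON = "http://neutron.example.test:9696"  # placeholder endpoint
HEADERS = {"X-Auth-Token": "<token>"}         # placeholder credential
instance_uuid = "0ccec194-ef9c-42b7-8fd3-0baa3b012842"

# Ports bound to the instance carry its UUID as device_id.
ports = requests.get(
    f"{NEUTRON}/v2.0/ports",
    params={"device_id": instance_uuid},
    headers=HEADERS,
).json()["ports"]
for port in ports:  # empty here, matching "network_info: []"
    requests.delete(f"{NEUTRON}/v2.0/ports/{port['id']}", headers=HEADERS)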
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1889.956845] env[61855]: DEBUG nova.network.neutron [-] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.965482] env[61855]: INFO nova.compute.manager [-] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] Took 0.03 seconds to deallocate network for instance. [ 1889.971901] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd4b3f6-6fab-487f-9444-8bd286a35f46 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.979407] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599b7561-c397-45c5-a186-c36f2670dbc2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.012261] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926545aa-a002-4d4a-bdaf-c048b307007c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.019407] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63667516-3219-40a8-a356-ee856abbf732 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.034714] env[61855]: DEBUG nova.compute.provider_tree [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1890.042259] env[61855]: DEBUG nova.scheduler.client.report [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1890.060840] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.061338] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Start building networks asynchronously for instance. 
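The inventory dict that the report client keeps confirming as unchanged fixes the host's schedulable capacity: placement exposes (total - reserved) * allocation_ratio per resource class, while max_unit caps what any single allocation may request. Plugging in the values from these records:

inv = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 210,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 107},
}
for rc, v in inv.items():
    capacity = (v["total"] - v["reserved"]) * v["allocation_ratio"]
    print(f"{rc}: capacity={capacity}, max per allocation={v['max_unit']}")
# VCPU: capacity=192.0 (48 cores oversubscribed 4x), MEMORY_MB: 196078.0,
# DISK_GB: 210.0; a single instance is capped at 16 VCPUs / 65530 MB / 107 GB.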
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1890.069963] env[61855]: DEBUG oslo_concurrency.lockutils [None req-1ac2666d-3a59-43b2-91ef-2c9447ee79b4 tempest-ListServersNegativeTestJSON-1338885870 tempest-ListServersNegativeTestJSON-1338885870-project-member] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.368s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.070752] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 130.787s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.070941] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 0ccec194-ef9c-42b7-8fd3-0baa3b012842] During sync_power_state the instance has a pending task (deleting). Skip. [ 1890.071127] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "0ccec194-ef9c-42b7-8fd3-0baa3b012842" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.094673] env[61855]: DEBUG nova.compute.utils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1890.095881] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1890.096066] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1890.103798] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Start building block device mappings for instance.
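The Acquiring / "acquired :: waited" / "released :: held" triplets throughout these records are emitted by oslo.concurrency's lock helpers, which serialize the resource tracker and per-instance operations. The same pattern in application code, as a sketch (the lock names mirror the ones in this log):

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_available_resource():
    # Claim arithmetic runs under the same lock the resource tracker
    # holds in the records above, so concurrent claims cannot interleave.
    ...

# Equivalent context-manager form, e.g. the per-instance build lock:
with lockutils.lock("97cb77dc-8752-4d84-a2f3-9c22b241fa9d"):
    ...  # build / terminate steps serialized per instance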
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1890.150995] env[61855]: DEBUG nova.policy [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6555bca994941ef876a79f963c827e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e855a24a69fb43d8955f7f8fe8cbb3da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1890.175446] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1890.204939] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1890.205281] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1890.205492] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1890.205690] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1890.205840] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1890.205990] 
env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1890.206215] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1890.206380] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1890.206571] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1890.206763] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1890.206966] env[61855]: DEBUG nova.virt.hardware [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1890.207870] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5dca95-adf5-4e9a-9ff9-25ebafe8fa2e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.216776] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c7d518-c564-4332-8ac7-04c5e5f8a025 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.569743] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Successfully created port: 8d5b55f8-3eba-4f8c-9f45-548229767d5e {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1891.193137] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Successfully updated port: 8d5b55f8-3eba-4f8c-9f45-548229767d5e {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1891.204645] env[61855]: DEBUG oslo_concurrency.lockutils [None 
req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "refresh_cache-97cb77dc-8752-4d84-a2f3-9c22b241fa9d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.204795] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "refresh_cache-97cb77dc-8752-4d84-a2f3-9c22b241fa9d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.204950] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1891.249156] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1891.624351] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Updating instance_info_cache with network_info: [{"id": "8d5b55f8-3eba-4f8c-9f45-548229767d5e", "address": "fa:16:3e:ae:c7:64", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d5b55f8-3e", "ovs_interfaceid": "8d5b55f8-3eba-4f8c-9f45-548229767d5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.641161] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "refresh_cache-97cb77dc-8752-4d84-a2f3-9c22b241fa9d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.641459] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c 
tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Instance network_info: |[{"id": "8d5b55f8-3eba-4f8c-9f45-548229767d5e", "address": "fa:16:3e:ae:c7:64", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d5b55f8-3e", "ovs_interfaceid": "8d5b55f8-3eba-4f8c-9f45-548229767d5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1891.641997] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:c7:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d5b55f8-3eba-4f8c-9f45-548229767d5e', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1891.649828] env[61855]: DEBUG oslo.service.loopingcall [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.650336] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1891.650575] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d2249dd-ee79-40ad-a3b1-9ad47b5eb07b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.671264] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1891.671264] env[61855]: value = "task-4302966" [ 1891.671264] env[61855]: _type = "Task" [ 1891.671264] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.678745] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302966, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.681379] env[61855]: DEBUG nova.compute.manager [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Received event network-vif-plugged-8d5b55f8-3eba-4f8c-9f45-548229767d5e {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1891.681588] env[61855]: DEBUG oslo_concurrency.lockutils [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] Acquiring lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.681743] env[61855]: DEBUG oslo_concurrency.lockutils [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.681969] env[61855]: DEBUG oslo_concurrency.lockutils [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.682153] env[61855]: DEBUG nova.compute.manager [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] No waiting events found dispatching network-vif-plugged-8d5b55f8-3eba-4f8c-9f45-548229767d5e {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1891.682341] env[61855]: WARNING nova.compute.manager [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Received unexpected event network-vif-plugged-8d5b55f8-3eba-4f8c-9f45-548229767d5e for instance with vm_state building and task_state spawning. [ 1891.682507] env[61855]: DEBUG nova.compute.manager [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Received event network-changed-8d5b55f8-3eba-4f8c-9f45-548229767d5e {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1891.682661] env[61855]: DEBUG nova.compute.manager [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Refreshing instance network info cache due to event network-changed-8d5b55f8-3eba-4f8c-9f45-548229767d5e. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1891.682842] env[61855]: DEBUG oslo_concurrency.lockutils [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] Acquiring lock "refresh_cache-97cb77dc-8752-4d84-a2f3-9c22b241fa9d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.682979] env[61855]: DEBUG oslo_concurrency.lockutils [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] Acquired lock "refresh_cache-97cb77dc-8752-4d84-a2f3-9c22b241fa9d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.683155] env[61855]: DEBUG nova.network.neutron [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Refreshing network info cache for port 8d5b55f8-3eba-4f8c-9f45-548229767d5e {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1891.969128] env[61855]: DEBUG nova.network.neutron [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Updated VIF entry in instance network info cache for port 8d5b55f8-3eba-4f8c-9f45-548229767d5e. {{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1891.969480] env[61855]: DEBUG nova.network.neutron [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Updating instance_info_cache with network_info: [{"id": "8d5b55f8-3eba-4f8c-9f45-548229767d5e", "address": "fa:16:3e:ae:c7:64", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d5b55f8-3e", "ovs_interfaceid": "8d5b55f8-3eba-4f8c-9f45-548229767d5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.979401] env[61855]: DEBUG oslo_concurrency.lockutils [req-ea8f86f6-95a7-4c47-88b3-30ad61a1b8a7 req-30e94049-443a-40b9-afc9-9ee1eae71320 service nova] Releasing lock "refresh_cache-97cb77dc-8752-4d84-a2f3-9c22b241fa9d" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.185022] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302966, 'name': CreateVM_Task, 'duration_secs': 0.3198} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.185300] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1892.186134] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.186380] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.186823] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1892.187166] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27409fa9-8489-478c-8bc7-e937eb966c4a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.193036] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 1892.193036] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5229d6b2-c6a9-5d48-58d8-40e572748d52" [ 1892.193036] env[61855]: _type = "Task" [ 1892.193036] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.204384] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5229d6b2-c6a9-5d48-58d8-40e572748d52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.704342] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.705051] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1892.705051] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.265275] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1913.924782] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.924223] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.924646] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1915.924646] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1915.950253] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.950420] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.950547] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.950654] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.950812] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.950955] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.951241] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.951396] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.951523] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.951645] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1915.951768] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1915.952284] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.952458] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1916.924133] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1917.924394] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.924683] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.925075] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.936869] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.937129] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.937314] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.937473] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1918.938880] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2823b6-5051-4ba6-8231-639dd5c69d6d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.947446] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2187d8d3-3d64-44b3-90dc-30a3642b30b2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.961999] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f49413a-2c1a-47f5-b3bc-cba630bdda1f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.967965] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf458a8-c560-4572-a9a2-cfc65f787774 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.996222] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180683MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1918.996362] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.996530] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.067359] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.067522] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.067652] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.067778] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.067897] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.068027] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.068147] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.068264] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.068376] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.068489] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1919.068668] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1919.068807] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1919.177342] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c26abf1-e7fe-4b9b-affb-02ea27037058 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.185086] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a963adb-d00d-4c94-a976-8ed7c2e468df {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.214624] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c1628a-9d1a-4970-8b4e-b413f5de27e0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.221033] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a54826b-4a7b-40b8-9a1c-12b7f88e5fb2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.233614] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.241513] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1919.255744] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1919.255938] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.259s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.255597] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1935.058538] env[61855]: WARNING oslo_vmware.rw_handles [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1935.058538] env[61855]: ERROR oslo_vmware.rw_handles [ 1935.059282] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1935.061169] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1935.061431] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Copying Virtual Disk [datastore2] vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/e9114b12-8658-42e8-9812-6392ea56d0d9/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1935.061751] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2467b38-08ca-44e7-8aed-9507fbf4b88b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.070234] env[61855]: DEBUG oslo_vmware.api [None 
req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 1935.070234] env[61855]: value = "task-4302967" [ 1935.070234] env[61855]: _type = "Task" [ 1935.070234] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.078076] env[61855]: DEBUG oslo_vmware.api [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.580643] env[61855]: DEBUG oslo_vmware.exceptions [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1935.580929] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.581579] env[61855]: ERROR nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1935.581579] env[61855]: Faults: ['InvalidArgument'] [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Traceback (most recent call last): [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] yield resources [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self.driver.spawn(context, instance, image_meta, [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self._fetch_image_if_missing(context, vi) [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] image_cache(vi, tmp_image_ds_loc) [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] vm_util.copy_virtual_disk( [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] session._wait_for_task(vmdk_copy_task) [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] return self.wait_for_task(task_ref) [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] return evt.wait() [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] result = hub.switch() [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] return self.greenlet.switch() [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self.f(*self.args, **self.kw) [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] raise exceptions.translate_fault(task_info.error) [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Faults: ['InvalidArgument'] [ 1935.581579] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] [ 1935.582579] env[61855]: INFO nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: 
a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Terminating instance [ 1935.584686] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.584686] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1935.584686] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99bc3cc6-f229-466e-9f54-030a673b0dc3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.587588] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1935.588993] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1935.588993] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08441bdf-e1e4-48be-a767-c532e2654d10 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.591966] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1935.592145] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1935.593092] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d83d52d0-355e-478a-8e73-3210604fefef {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.596922] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1935.597416] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56f9f4aa-1040-41dc-81d5-0d4a2c59afbe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.599704] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 1935.599704] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52430e53-7ecf-41a3-31ba-d07071ec2115" [ 1935.599704] env[61855]: _type = "Task" [ 1935.599704] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.608482] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52430e53-7ecf-41a3-31ba-d07071ec2115, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.664746] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1935.665033] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1935.665335] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleting the datastore file [datastore2] a1cac88d-1c85-4f4a-9527-1be4dc7dba21 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1935.665616] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08f6e79b-c1ff-4c99-98bc-0776b221b9ac {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.671302] env[61855]: DEBUG oslo_vmware.api [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 1935.671302] env[61855]: value = "task-4302969" [ 1935.671302] env[61855]: _type = "Task" [ 1935.671302] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.678912] env[61855]: DEBUG oslo_vmware.api [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.110613] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1936.110998] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating directory with path [datastore2] vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1936.111111] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abe101b6-4694-4bd4-94ae-f4bee80209e6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.121852] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Created directory with path [datastore2] vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1936.122055] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Fetch image to [datastore2] vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1936.122234] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1936.122982] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338def77-2ec1-41db-a82b-b373da9dd94f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.129626] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7cba30-b6c8-4961-818c-fcae5cdb5d6d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.139297] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9db601-60fc-47ff-9e3c-4bb6f928e570 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.170856] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7c41c7f7-5bd6-4fab-8742-b108e88cc7f5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.181525] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7144c45c-fcbe-45c2-8bd3-08a7651f38db {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.183172] env[61855]: DEBUG oslo_vmware.api [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066944} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.183414] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1936.183595] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1936.183767] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1936.183939] env[61855]: INFO nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Took 0.60 seconds to destroy the instance on the hypervisor. 
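The destroy sequence above follows oslo.vmware's invoke-then-poll pattern: a vSphere *_Task method returns a Task managed-object reference immediately, and wait_for_task() polls it (the "progress is 0%" lines) until it completes or raises. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, and moref id below are hypothetical placeholders, not values from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # VMwareAPISession logs in on construction, so this only runs
    # against a real vCenter; host/user/password are placeholders.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    # Placeholder datacenter moref; real moref ids look like
    # 'datacenter-2', not the dcPath name seen in the log URLs.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

    # DeleteDatastoreFile_Task returns a Task moref at once;
    # wait_for_task() then polls it until success or a fault.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] a1cac88d-1c85-4f4a-9527-1be4dc7dba21',
        datacenter=dc_ref)
    session.wait_for_task(task)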
[ 1936.186097] env[61855]: DEBUG nova.compute.claims [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1936.186285] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.186509] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.204898] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1936.265742] env[61855]: DEBUG oslo_vmware.rw_handles [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1936.326371] env[61855]: DEBUG oslo_vmware.rw_handles [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1936.326677] env[61855]: DEBUG oslo_vmware.rw_handles [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1936.409015] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b3f117-2f63-4c44-b719-0c790dfb39a1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.416809] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab741a9d-b14e-41a2-bef7-1c1b20950d38 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.447597] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad0a0dc-dd17-4cba-a55b-b6ba6c45fc7e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.454501] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93521402-abcb-4fd2-a0e9-ddaf614d157a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.467370] env[61855]: DEBUG nova.compute.provider_tree [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1936.476214] env[61855]: DEBUG nova.scheduler.client.report [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1936.490842] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.304s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.491493] env[61855]: ERROR nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1936.491493] env[61855]: Faults: ['InvalidArgument'] [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Traceback (most recent call last): [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1936.491493] env[61855]: 
ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self.driver.spawn(context, instance, image_meta, [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self._fetch_image_if_missing(context, vi) [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] image_cache(vi, tmp_image_ds_loc) [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] vm_util.copy_virtual_disk( [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] session._wait_for_task(vmdk_copy_task) [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] return self.wait_for_task(task_ref) [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] return evt.wait() [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] result = hub.switch() [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] return self.greenlet.switch() [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] self.f(*self.args, **self.kw) [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] raise exceptions.translate_fault(task_info.error) [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Faults: ['InvalidArgument'] [ 1936.491493] env[61855]: ERROR nova.compute.manager [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] [ 1936.492501] env[61855]: DEBUG nova.compute.utils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1936.493698] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Build of instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 was re-scheduled: A specified parameter was not correct: fileType [ 1936.493698] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1936.494093] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1936.494273] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1936.494446] env[61855]: DEBUG nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1936.494609] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1936.820919] env[61855]: DEBUG nova.network.neutron [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.835495] env[61855]: INFO nova.compute.manager [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Took 0.34 seconds to deallocate network for instance. [ 1937.165322] env[61855]: INFO nova.scheduler.client.report [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleted allocations for instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 [ 1937.186230] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7e80fc5c-2ad9-41bf-a655-a568f33da1db tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 674.671s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.186810] env[61855]: DEBUG oslo_concurrency.lockutils [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 478.066s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.186810] env[61855]: DEBUG oslo_concurrency.lockutils [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.187027] env[61855]: DEBUG oslo_concurrency.lockutils [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.187123] env[61855]: DEBUG oslo_concurrency.lockutils [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.189338] env[61855]: INFO nova.compute.manager [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Terminating instance [ 1937.191131] env[61855]: DEBUG nova.compute.manager [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1937.191327] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1937.192064] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3c5c305-cd92-479c-ae8a-c15c2638d66a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.200617] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef407581-1005-47ff-a353-251c1d28a274 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.229902] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a1cac88d-1c85-4f4a-9527-1be4dc7dba21 could not be found. [ 1937.230211] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1937.230396] env[61855]: INFO nova.compute.manager [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1937.230635] env[61855]: DEBUG oslo.service.loopingcall [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1937.230843] env[61855]: DEBUG nova.compute.manager [-] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1937.230938] env[61855]: DEBUG nova.network.neutron [-] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1937.254533] env[61855]: DEBUG nova.network.neutron [-] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.263294] env[61855]: INFO nova.compute.manager [-] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] Took 0.03 seconds to deallocate network for instance. [ 1937.346803] env[61855]: DEBUG oslo_concurrency.lockutils [None req-77a95f91-a840-409d-a625-0d388957d8ae tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.348106] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 178.063s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.348106] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: a1cac88d-1c85-4f4a-9527-1be4dc7dba21] During sync_power_state the instance has a pending task (deleting). Skip. 
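The lock lines above show oslo.concurrency's named internal locks serializing work per instance UUID: the build held "a1cac88d-..." for 674.671s, so do_terminate_instance reports waiting 478.066s before it could acquire the same name. A small sketch of that per-UUID serialization pattern (illustrative only; the function names are hypothetical, not nova's actual wrappers):

    from oslo_concurrency import lockutils

    def do_terminate_instance(instance_uuid):
        # Using the instance UUID as the lock name gives per-instance
        # serialization; the default is an in-process semaphore, which
        # matches these non-"external" lock records.
        @lockutils.synchronized(instance_uuid)
        def _locked():
            print('terminating %s' % instance_uuid)
        _locked()

    do_terminate_instance('a1cac88d-1c85-4f4a-9527-1be4dc7dba21')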
[ 1937.348106] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "a1cac88d-1c85-4f4a-9527-1be4dc7dba21" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.920594] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1974.925613] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1975.924425] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1975.924646] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1975.924713] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1975.945647] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.945975] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.945975] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.946076] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.946173] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.946293] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.946419] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.946531] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.946643] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1975.946759] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1976.924881] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.555153] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.555447] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.567178] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1977.618031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.618031] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.619489] env[61855]: INFO nova.compute.claims [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1977.778930] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ee4d5a-34ed-49bf-85f5-bb11223083a9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.786623] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c60dc26-938b-46e2-8895-70515e066f99 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.817949] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b0bd13-22bd-4bac-977f-832e9bfce7f3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.825300] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac43fdcc-efcf-40ea-81d0-019273d63ed8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.838789] env[61855]: DEBUG nova.compute.provider_tree [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1977.847686] env[61855]: DEBUG nova.scheduler.client.report [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1977.861725] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 
tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.244s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.862218] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1977.893091] env[61855]: DEBUG nova.compute.utils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1977.894572] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1977.894748] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1977.905161] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1977.924240] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.924412] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1977.953589] env[61855]: DEBUG nova.policy [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5256e05619f744e988b78876f04b7286', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a6c123dab04b01868b291d2b953e75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1977.970389] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Start spawning the instance on the hypervisor. {{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1977.990920] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1977.991176] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1977.991342] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1977.991529] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1977.991681] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1977.991831] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 
tempest-ServersTestJSON-1669392497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1977.992369] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1977.992598] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1977.992815] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1977.992991] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1977.993194] env[61855]: DEBUG nova.virt.hardware [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1977.994734] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c17446-f7be-4488-8126-b3d257591f76 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.005540] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d82c87-f097-41d0-9720-d7bd495f685d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.343495] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Successfully created port: 6aa4503e-d12c-484f-858d-f2bee24b3ade {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1978.587793] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "aa42a06f-c679-4530-8762-15ea8dc35d9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.588101] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock 
"aa42a06f-c679-4530-8762-15ea8dc35d9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.915942] env[61855]: DEBUG nova.compute.manager [req-bcfd1725-81f9-4b19-afd8-c2eb1baa24da req-a5168873-2f01-4ac0-bc3b-e2beb08d4f87 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Received event network-vif-plugged-6aa4503e-d12c-484f-858d-f2bee24b3ade {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1978.916176] env[61855]: DEBUG oslo_concurrency.lockutils [req-bcfd1725-81f9-4b19-afd8-c2eb1baa24da req-a5168873-2f01-4ac0-bc3b-e2beb08d4f87 service nova] Acquiring lock "455d56f3-71f3-4024-a52b-8fd59ca923d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.916386] env[61855]: DEBUG oslo_concurrency.lockutils [req-bcfd1725-81f9-4b19-afd8-c2eb1baa24da req-a5168873-2f01-4ac0-bc3b-e2beb08d4f87 service nova] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.916596] env[61855]: DEBUG oslo_concurrency.lockutils [req-bcfd1725-81f9-4b19-afd8-c2eb1baa24da req-a5168873-2f01-4ac0-bc3b-e2beb08d4f87 service nova] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.916781] env[61855]: DEBUG nova.compute.manager [req-bcfd1725-81f9-4b19-afd8-c2eb1baa24da req-a5168873-2f01-4ac0-bc3b-e2beb08d4f87 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] No waiting events found dispatching network-vif-plugged-6aa4503e-d12c-484f-858d-f2bee24b3ade {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1978.916943] env[61855]: WARNING nova.compute.manager [req-bcfd1725-81f9-4b19-afd8-c2eb1baa24da req-a5168873-2f01-4ac0-bc3b-e2beb08d4f87 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Received unexpected event network-vif-plugged-6aa4503e-d12c-484f-858d-f2bee24b3ade for instance with vm_state building and task_state spawning. 
[ 1978.924238] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1978.995225] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Successfully updated port: 6aa4503e-d12c-484f-858d-f2bee24b3ade {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1979.006509] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "refresh_cache-455d56f3-71f3-4024-a52b-8fd59ca923d7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.006628] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "refresh_cache-455d56f3-71f3-4024-a52b-8fd59ca923d7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.006904] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1979.048349] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1979.209885] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Updating instance_info_cache with network_info: [{"id": "6aa4503e-d12c-484f-858d-f2bee24b3ade", "address": "fa:16:3e:65:39:18", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa4503e-d1", "ovs_interfaceid": "6aa4503e-d12c-484f-858d-f2bee24b3ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.222809] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "refresh_cache-455d56f3-71f3-4024-a52b-8fd59ca923d7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.223116] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Instance network_info: |[{"id": "6aa4503e-d12c-484f-858d-f2bee24b3ade", "address": "fa:16:3e:65:39:18", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa4503e-d1", "ovs_interfaceid": "6aa4503e-d12c-484f-858d-f2bee24b3ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1979.223560] env[61855]: 
DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:39:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f925dc8-2145-457e-a4d4-c07117356dd0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6aa4503e-d12c-484f-858d-f2bee24b3ade', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1979.231187] env[61855]: DEBUG oslo.service.loopingcall [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1979.231640] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1979.231874] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6031eb47-ebbb-4002-8f96-ef40ecf9ec78 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.252231] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1979.252231] env[61855]: value = "task-4302970" [ 1979.252231] env[61855]: _type = "Task" [ 1979.252231] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.259771] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302970, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.762251] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302970, 'name': CreateVM_Task, 'duration_secs': 0.320601} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.762594] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1979.763062] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.763236] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.763585] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1979.763831] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8365a4a0-26ae-4531-8d5d-4e77b543ff20 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.767881] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 1979.767881] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b11d48-7bc4-71d6-70f1-2bee19e53222" [ 1979.767881] env[61855]: _type = "Task" [ 1979.767881] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.775013] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52b11d48-7bc4-71d6-70f1-2bee19e53222, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.919782] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.941189] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.278506] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.278793] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1980.279015] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.924069] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.924393] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.937686] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.938329] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.938329] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1980.938329] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1980.939544] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551c6a97-8ac7-47a7-8623-e80cb661ee80 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.949493] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb10e89d-4fff-4064-93b3-3a9481cf1b03 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.954158] env[61855]: DEBUG nova.compute.manager [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Received event network-changed-6aa4503e-d12c-484f-858d-f2bee24b3ade {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1980.954347] env[61855]: DEBUG nova.compute.manager [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Refreshing instance network info cache due to event network-changed-6aa4503e-d12c-484f-858d-f2bee24b3ade. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1980.954740] env[61855]: DEBUG oslo_concurrency.lockutils [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] Acquiring lock "refresh_cache-455d56f3-71f3-4024-a52b-8fd59ca923d7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.954895] env[61855]: DEBUG oslo_concurrency.lockutils [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] Acquired lock "refresh_cache-455d56f3-71f3-4024-a52b-8fd59ca923d7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.955070] env[61855]: DEBUG nova.network.neutron [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Refreshing network info cache for port 6aa4503e-d12c-484f-858d-f2bee24b3ade {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1980.967308] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc17286-da29-477d-9ff1-54e4c2cf83ee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.976372] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fd0cde-4c06-42b2-9a83-507f2b448b9f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.007972] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180688MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 
1981.007972] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.007972] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.084072] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.084248] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 8653615e-3254-436e-984d-e52fdfb86ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.084379] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.084504] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.084625] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.084816] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.084958] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.085137] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.085299] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.085424] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.100244] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.100471] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1981.100622] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1981.238075] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41715d1-3a08-44ed-b95b-b78e31ff39c2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.245402] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b251d2e5-e4b1-4448-9947-5304522d99a2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.276797] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0674f2-67ba-41af-8168-53c6007381da {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.283992] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c18495-4788-4ac5-9dd9-f37d97c8afc3 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.298967] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.307405] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1981.325725] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1981.325927] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.318s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.329078] env[61855]: DEBUG nova.network.neutron [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Updated VIF entry in instance network info cache for port 6aa4503e-d12c-484f-858d-f2bee24b3ade. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1981.329415] env[61855]: DEBUG nova.network.neutron [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Updating instance_info_cache with network_info: [{"id": "6aa4503e-d12c-484f-858d-f2bee24b3ade", "address": "fa:16:3e:65:39:18", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa4503e-d1", "ovs_interfaceid": "6aa4503e-d12c-484f-858d-f2bee24b3ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.337468] env[61855]: DEBUG oslo_concurrency.lockutils [req-8d8594f5-d1c1-440c-a575-094419ee2733 req-180ac671-4fd8-4b88-863d-5afdcaf7d913 service nova] Releasing lock "refresh_cache-455d56f3-71f3-4024-a52b-8fd59ca923d7" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.567729] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "333ca086-f585-4325-9ba8-fbcdfc6650f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.665487] env[61855]: WARNING oslo_vmware.rw_handles [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1985.665487] env[61855]: ERROR oslo_vmware.rw_handles [ 1985.666361] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1985.668207] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1985.668484] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Copying Virtual Disk [datastore2] vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/c5c569b3-2f1d-4fd7-8386-868471d4f6cb/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1985.668768] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc66601b-c67e-417f-8550-5935efcd9620 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.677086] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 1985.677086] env[61855]: value = "task-4302971" [ 1985.677086] env[61855]: _type = "Task" [ 1985.677086] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.684484] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': task-4302971, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.187201] env[61855]: DEBUG oslo_vmware.exceptions [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1986.187499] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.188100] env[61855]: ERROR nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1986.188100] env[61855]: Faults: ['InvalidArgument'] [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Traceback (most recent call last): [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] yield resources [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self.driver.spawn(context, instance, image_meta, [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self._fetch_image_if_missing(context, vi) [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] image_cache(vi, tmp_image_ds_loc) [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] vm_util.copy_virtual_disk( [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] session._wait_for_task(vmdk_copy_task) [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] return self.wait_for_task(task_ref) [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] return evt.wait() [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] result = hub.switch() [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] return self.greenlet.switch() [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self.f(*self.args, **self.kw) [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] raise exceptions.translate_fault(task_info.error) [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Faults: ['InvalidArgument'] [ 1986.188100] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] [ 1986.189125] env[61855]: INFO nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Terminating instance [ 1986.190017] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.190234] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1986.190478] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-cdcf1c63-0804-4750-84eb-6acd74f3056d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.192755] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1986.192944] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1986.193662] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7dc601-6a8c-4fb7-88b4-6c7dc7437147 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.200224] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1986.200433] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86cee21c-863d-40e5-acdd-38243822a13c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.202478] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1986.202652] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1986.203574] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ede1093-31d9-4295-9b66-68f318aa5304 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.208758] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Waiting for the task: (returnval){ [ 1986.208758] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527ba601-4ada-b953-1455-db6d22837957" [ 1986.208758] env[61855]: _type = "Task" [ 1986.208758] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.219924] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527ba601-4ada-b953-1455-db6d22837957, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.266668] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1986.266898] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1986.267097] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Deleting the datastore file [datastore2] db4efbf1-db2e-404b-90fb-57c6a56bf7c7 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1986.267377] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-662a3f7e-126b-4f56-b45f-be52264d6750 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.273787] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 1986.273787] env[61855]: value = "task-4302973" [ 1986.273787] env[61855]: _type = "Task" [ 1986.273787] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.281116] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': task-4302973, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.718262] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1986.718641] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Creating directory with path [datastore2] vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1986.718793] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbf29d0e-7de1-4d9e-8710-9e095c91a90f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.729070] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Created directory with path [datastore2] vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1986.729265] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Fetch image to [datastore2] vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1986.729438] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1986.730168] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84640d96-29a2-4eb4-92ed-44e18da0eace {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.736396] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6be3e66-3dbe-465c-8dc7-c4fd351874b7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.745984] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22f7902-4d79-4c13-93eb-b3a170fbba5b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.778419] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8979e1c4-bb5d-4e41-9505-607723351ef1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.784927] env[61855]: DEBUG oslo_vmware.api [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': task-4302973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082429} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.787027] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1986.787027] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1986.787027] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1986.787027] env[61855]: INFO nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Took 0.59 seconds to destroy the instance on the hypervisor. 
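The copy/delete sequence above follows the generic oslo.vmware task pattern: invoke the SOAP method to obtain a Task managed-object reference, then poll it until success or error (the "progress is 0%" and "completed successfully" lines). A minimal sketch of that pattern, assuming a reachable vCenter; the hostname, credentials, datastore path, and the dc_moref lookup are placeholders, not values taken from this log:

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    # Session construction mirrors what VMwareAPISession does at service
    # startup; the retry/poll values here are illustrative.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    dc_moref = None  # datacenter managed-object ref, looked up elsewhere

    # invoke_api() issues the SOAP call and returns a Task moref;
    # wait_for_task() polls it and raises a translated fault on error,
    # which is exactly where the CopyVirtualDisk traceback above ends.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name='[datastore2] some/dir',
                              datacenter=dc_moref)
    try:
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        print(e.fault_list)  # e.g. ['InvalidArgument'], as logged above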
[ 1986.788711] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-874f532a-0e19-445e-a69f-eb0a2c1f18c2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.790633] env[61855]: DEBUG nova.compute.claims [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1986.790818] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.791033] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.812202] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1986.865449] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1986.925783] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1986.925783] env[61855]: DEBUG oslo_vmware.rw_handles [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1987.032115] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d95ac1-a8db-4335-ab52-c4e6056a29cf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.039598] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3066b41a-671f-4688-8c2b-35f14b7829fd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.070380] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bdf55b0-81c4-4c78-915f-533cf45034db {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.077144] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1591d235-2421-478d-9479-2ab6bffbe22c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.089728] env[61855]: DEBUG nova.compute.provider_tree [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1987.098162] env[61855]: DEBUG nova.scheduler.client.report [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1987.113344] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.113878] env[61855]: ERROR nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1987.113878] env[61855]: Faults: ['InvalidArgument'] [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Traceback (most recent call last): [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1987.113878] 
env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self.driver.spawn(context, instance, image_meta, [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self._fetch_image_if_missing(context, vi) [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] image_cache(vi, tmp_image_ds_loc) [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] vm_util.copy_virtual_disk( [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] session._wait_for_task(vmdk_copy_task) [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] return self.wait_for_task(task_ref) [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] return evt.wait() [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] result = hub.switch() [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] return self.greenlet.switch() [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] self.f(*self.args, **self.kw) [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] raise exceptions.translate_fault(task_info.error) [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Faults: ['InvalidArgument'] [ 1987.113878] env[61855]: ERROR nova.compute.manager [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] [ 1987.114904] env[61855]: DEBUG nova.compute.utils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1987.115910] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Build of instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 was re-scheduled: A specified parameter was not correct: fileType [ 1987.115910] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1987.116292] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1987.116467] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1987.116638] env[61855]: DEBUG nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1987.116800] env[61855]: DEBUG nova.network.neutron [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1987.426840] env[61855]: DEBUG nova.network.neutron [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1987.442554] env[61855]: INFO nova.compute.manager [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Took 0.33 seconds to deallocate network for instance. [ 1987.537399] env[61855]: INFO nova.scheduler.client.report [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Deleted allocations for instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 [ 1987.558356] env[61855]: DEBUG oslo_concurrency.lockutils [None req-a5a7bfdf-2c63-4411-bf65-b46751a8498f tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 591.527s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.559473] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 396.143s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.559700] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.559911] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.560088] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.562036] env[61855]: INFO nova.compute.manager [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Terminating instance [ 1987.563710] env[61855]: DEBUG nova.compute.manager [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1987.564275] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1987.564370] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-648845c7-09ae-482d-8b25-2227588a65dd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.569475] env[61855]: DEBUG nova.compute.manager [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1987.575884] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2766486-0210-4462-8c60-bb730ee9623f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.605177] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db4efbf1-db2e-404b-90fb-57c6a56bf7c7 could not be found. 
[ 1987.605504] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1987.605740] env[61855]: INFO nova.compute.manager [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1987.606018] env[61855]: DEBUG oslo.service.loopingcall [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1987.606250] env[61855]: DEBUG nova.compute.manager [-] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1987.606348] env[61855]: DEBUG nova.network.neutron [-] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1987.627769] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.628073] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.629571] env[61855]: INFO nova.compute.claims [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1987.633450] env[61855]: DEBUG nova.network.neutron [-] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1987.641408] env[61855]: INFO nova.compute.manager [-] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] Took 0.04 seconds to deallocate network for instance. 
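The claim just logged lands on the same provider whose inventory recurs throughout this log, and the earlier "Final resource view" numbers check out arithmetically: ten active instances each hold {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}, and the tracker appears to count the inventory's 512 MB memory reservation as used. A quick check, with the instance count and reservation taken from the log:

    per_instance = {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}
    n_instances = 10    # "total allocated vcpus: 10"
    reserved_mb = 512   # MEMORY_MB 'reserved' in the inventory dict

    print(n_instances * per_instance['VCPU'])                     # 10 used_vcpus
    print(n_instances * per_instance['MEMORY_MB'] + reserved_mb)  # 1792 used_ram (MB)
    print(n_instances * per_instance['DISK_GB'])                  # 10 used_disk (GB)

    # Placement treats schedulable capacity as (total - reserved) * allocation_ratio,
    # so the VCPU inventory above advertises (48 - 0) * 4.0 = 192 units.
    print((48 - 0) * 4.0)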
[ 1987.755216] env[61855]: DEBUG oslo_concurrency.lockutils [None req-6508c28e-5fcc-45dc-b8b2-b293771d1f37 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.196s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.757702] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 228.472s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.757702] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: db4efbf1-db2e-404b-90fb-57c6a56bf7c7] During sync_power_state the instance has a pending task (deleting). Skip. [ 1987.757702] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "db4efbf1-db2e-404b-90fb-57c6a56bf7c7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.835933] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befe4332-685d-4dab-8fa4-410844cf94eb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.844117] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba19e818-6640-4496-9b09-789c0f359d38 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.873376] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7ca41a-58ac-41bf-86ef-d5d5de2fd9d1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.880310] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb30285-f69a-49fc-a0f3-39a2bb3a283b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.892898] env[61855]: DEBUG nova.compute.provider_tree [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1987.901730] env[61855]: DEBUG nova.scheduler.client.report [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 
'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1987.915743] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.288s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.916210] env[61855]: DEBUG nova.compute.manager [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1987.947399] env[61855]: DEBUG nova.compute.utils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1987.949321] env[61855]: DEBUG nova.compute.manager [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1987.949497] env[61855]: DEBUG nova.network.neutron [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1987.959593] env[61855]: DEBUG nova.compute.manager [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1988.003760] env[61855]: DEBUG nova.policy [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6365552b80dc401d8a166f179d231b10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ec8ab8fc8404ec8a37780aa6e6fd40e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 1988.022839] env[61855]: DEBUG nova.compute.manager [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1988.048652] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1988.048899] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1988.049074] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1988.049265] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1988.049411] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1988.049704] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1988.049796] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1988.049957] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1988.050272] env[61855]: DEBUG nova.virt.hardware [None 
req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1988.050465] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1988.050628] env[61855]: DEBUG nova.virt.hardware [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1988.051505] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70680905-5caf-4a2a-8242-cc2597272b04 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.059766] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d946bb-b94e-4dff-b7eb-73985a83dda3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.366718] env[61855]: DEBUG nova.network.neutron [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Successfully created port: cdcdc31a-38b9-4a98-9c8a-871004e89845 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1989.239093] env[61855]: DEBUG nova.network.neutron [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Successfully updated port: cdcdc31a-38b9-4a98-9c8a-871004e89845 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1989.249041] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "refresh_cache-aa42a06f-c679-4530-8762-15ea8dc35d9b" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.249202] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "refresh_cache-aa42a06f-c679-4530-8762-15ea8dc35d9b" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.249348] env[61855]: DEBUG nova.network.neutron [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1989.289162] env[61855]: DEBUG nova.network.neutron [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 
tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1989.479114] env[61855]: DEBUG nova.compute.manager [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Received event network-vif-plugged-cdcdc31a-38b9-4a98-9c8a-871004e89845 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1989.479314] env[61855]: DEBUG oslo_concurrency.lockutils [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] Acquiring lock "aa42a06f-c679-4530-8762-15ea8dc35d9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.479525] env[61855]: DEBUG oslo_concurrency.lockutils [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] Lock "aa42a06f-c679-4530-8762-15ea8dc35d9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.479696] env[61855]: DEBUG oslo_concurrency.lockutils [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] Lock "aa42a06f-c679-4530-8762-15ea8dc35d9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.479865] env[61855]: DEBUG nova.compute.manager [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] No waiting events found dispatching network-vif-plugged-cdcdc31a-38b9-4a98-9c8a-871004e89845 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1989.480046] env[61855]: WARNING nova.compute.manager [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Received unexpected event network-vif-plugged-cdcdc31a-38b9-4a98-9c8a-871004e89845 for instance with vm_state building and task_state spawning. [ 1989.480215] env[61855]: DEBUG nova.compute.manager [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Received event network-changed-cdcdc31a-38b9-4a98-9c8a-871004e89845 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1989.480374] env[61855]: DEBUG nova.compute.manager [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Refreshing instance network info cache due to event network-changed-cdcdc31a-38b9-4a98-9c8a-871004e89845. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1989.480539] env[61855]: DEBUG oslo_concurrency.lockutils [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] Acquiring lock "refresh_cache-aa42a06f-c679-4530-8762-15ea8dc35d9b" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.489654] env[61855]: DEBUG nova.network.neutron [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Updating instance_info_cache with network_info: [{"id": "cdcdc31a-38b9-4a98-9c8a-871004e89845", "address": "fa:16:3e:0e:be:da", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcdc31a-38", "ovs_interfaceid": "cdcdc31a-38b9-4a98-9c8a-871004e89845", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.502179] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "refresh_cache-aa42a06f-c679-4530-8762-15ea8dc35d9b" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.502446] env[61855]: DEBUG nova.compute.manager [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Instance network_info: |[{"id": "cdcdc31a-38b9-4a98-9c8a-871004e89845", "address": "fa:16:3e:0e:be:da", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapcdcdc31a-38", "ovs_interfaceid": "cdcdc31a-38b9-4a98-9c8a-871004e89845", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1989.502717] env[61855]: DEBUG oslo_concurrency.lockutils [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] Acquired lock "refresh_cache-aa42a06f-c679-4530-8762-15ea8dc35d9b" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.503018] env[61855]: DEBUG nova.network.neutron [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Refreshing network info cache for port cdcdc31a-38b9-4a98-9c8a-871004e89845 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1989.503934] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:be:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdcdc31a-38b9-4a98-9c8a-871004e89845', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1989.512336] env[61855]: DEBUG oslo.service.loopingcall [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1989.515241] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1989.515875] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc1c0642-22ee-4619-b1f1-5620dadfdc1a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.537210] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1989.537210] env[61855]: value = "task-4302974" [ 1989.537210] env[61855]: _type = "Task" [ 1989.537210] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.545408] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302974, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.789453] env[61855]: DEBUG nova.network.neutron [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Updated VIF entry in instance network info cache for port cdcdc31a-38b9-4a98-9c8a-871004e89845. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1989.789880] env[61855]: DEBUG nova.network.neutron [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Updating instance_info_cache with network_info: [{"id": "cdcdc31a-38b9-4a98-9c8a-871004e89845", "address": "fa:16:3e:0e:be:da", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcdc31a-38", "ovs_interfaceid": "cdcdc31a-38b9-4a98-9c8a-871004e89845", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.799283] env[61855]: DEBUG oslo_concurrency.lockutils [req-9182f476-c084-4474-93d8-79b4fc066e5f req-1baa8af4-9794-42e1-95b3-679b482d6426 service nova] Releasing lock "refresh_cache-aa42a06f-c679-4530-8762-15ea8dc35d9b" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.047462] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302974, 'name': CreateVM_Task, 'duration_secs': 0.266275} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.047635] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1990.048366] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.048527] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.048836] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1990.049093] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32a723c7-65d7-4c7b-b8a8-0b91a93a99a4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.053603] env[61855]: DEBUG oslo_vmware.api [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 1990.053603] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52f6328c-885c-fd3a-7633-9b1470d0c9fb" [ 1990.053603] env[61855]: _type = "Task" [ 1990.053603] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.061202] env[61855]: DEBUG oslo_vmware.api [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52f6328c-885c-fd3a-7633-9b1470d0c9fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.563364] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.563669] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1990.563797] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.925949] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2031.926333] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61855) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2032.929333] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2035.096025] env[61855]: WARNING oslo_vmware.rw_handles [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2035.096025] 
env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2035.096025] env[61855]: ERROR oslo_vmware.rw_handles [ 2035.096651] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2035.098450] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2035.098684] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Copying Virtual Disk [datastore2] vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/7141993d-3f1b-4c35-921b-5f06e5937dad/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2035.098979] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83da02ef-d020-4b6c-874f-d06e5086bd10 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.107522] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Waiting for the task: (returnval){ [ 2035.107522] env[61855]: value = "task-4302975" [ 2035.107522] env[61855]: _type = "Task" [ 2035.107522] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.114894] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Task: {'id': task-4302975, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.618495] env[61855]: DEBUG oslo_vmware.exceptions [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2035.618813] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.619382] env[61855]: ERROR nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2035.619382] env[61855]: Faults: ['InvalidArgument'] [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Traceback (most recent call last): [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] yield resources [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self.driver.spawn(context, instance, image_meta, [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self._fetch_image_if_missing(context, vi) [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] image_cache(vi, tmp_image_ds_loc) [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] vm_util.copy_virtual_disk( [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] session._wait_for_task(vmdk_copy_task) [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] return self.wait_for_task(task_ref) [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] return evt.wait() [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] result = hub.switch() [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] return self.greenlet.switch() [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self.f(*self.args, **self.kw) [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] raise exceptions.translate_fault(task_info.error) [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Faults: ['InvalidArgument'] [ 2035.619382] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] [ 2035.620444] env[61855]: INFO nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Terminating instance [ 2035.621339] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.621552] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2035.621799] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-07b9761b-cecf-4b0b-8475-9e4913dc80f0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.624030] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2035.624239] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2035.624991] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31b51e8-0534-484b-83fe-45e0b6b19b2f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.632103] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2035.632346] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea204ea6-1be9-42f7-b7c0-5a84d4aaa67e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.634715] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2035.634894] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2035.635868] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02036cc3-0e32-4046-ac5c-7febb81683e9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.640488] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 2035.640488] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5243c765-fb5b-686e-d450-a4aa939e08cd" [ 2035.640488] env[61855]: _type = "Task" [ 2035.640488] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.647836] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5243c765-fb5b-686e-d450-a4aa939e08cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.696674] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2035.696890] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2035.697082] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Deleting the datastore file [datastore2] 8653615e-3254-436e-984d-e52fdfb86ce4 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2035.697367] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d72f8aeb-5406-4df6-864c-2e7305fb61a2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.703931] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Waiting for the task: (returnval){ [ 2035.703931] env[61855]: value = "task-4302977" [ 2035.703931] env[61855]: _type = "Task" [ 2035.703931] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.711238] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Task: {'id': task-4302977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.924356] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.151126] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2036.151470] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating directory with path [datastore2] vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2036.151470] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-959f0ea4-edab-4cfb-8c0d-f51ef16256e4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.163108] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Created directory with path [datastore2] vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2036.163299] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Fetch image to [datastore2] vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2036.163468] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2036.164223] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80d70cf-e708-4a85-93b2-d695209edd31 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.171079] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482bcc44-f269-43ef-afae-9e70867d0be7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.180051] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-31a518af-4112-4eb9-9fb8-90fe779bc108 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.213275] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbbdd49-5f4a-457c-8a9c-2879d0111fd9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.221395] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bf3be336-2a45-49fa-a088-4f43f72e38a8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.223083] env[61855]: DEBUG oslo_vmware.api [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Task: {'id': task-4302977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07249} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.223321] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2036.223502] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2036.223672] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2036.223844] env[61855]: INFO nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2036.225978] env[61855]: DEBUG nova.compute.claims [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2036.226188] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.226438] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.243244] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2036.295947] env[61855]: DEBUG oslo_vmware.rw_handles [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2036.355257] env[61855]: DEBUG oslo_vmware.rw_handles [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2036.355449] env[61855]: DEBUG oslo_vmware.rw_handles [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2036.467889] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8267751e-4376-4ebb-95bc-da9f3c6b19ff {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.475435] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b5af04-a2ce-4886-85ab-6bcf6ef597e5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.504064] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a79d94c-220a-4b08-a740-b181d1b2fbd2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.510653] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f71804c-c873-472c-a212-d30a0b5025b3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.524171] env[61855]: DEBUG nova.compute.provider_tree [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2036.533524] env[61855]: DEBUG nova.scheduler.client.report [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2036.546330] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.320s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.546859] env[61855]: ERROR nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2036.546859] env[61855]: Faults: ['InvalidArgument'] [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Traceback (most recent call last): [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self.driver.spawn(context, instance, image_meta, [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self._fetch_image_if_missing(context, vi) [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] image_cache(vi, tmp_image_ds_loc) [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] vm_util.copy_virtual_disk( [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] session._wait_for_task(vmdk_copy_task) [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] return self.wait_for_task(task_ref) [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] return evt.wait() [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] result = hub.switch() [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] return self.greenlet.switch() [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] self.f(*self.args, **self.kw) [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 
8653615e-3254-436e-984d-e52fdfb86ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] raise exceptions.translate_fault(task_info.error) [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Faults: ['InvalidArgument'] [ 2036.546859] env[61855]: ERROR nova.compute.manager [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] [ 2036.547601] env[61855]: DEBUG nova.compute.utils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2036.548904] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Build of instance 8653615e-3254-436e-984d-e52fdfb86ce4 was re-scheduled: A specified parameter was not correct: fileType [ 2036.548904] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2036.549290] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2036.549466] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2036.549637] env[61855]: DEBUG nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2036.549798] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2036.925844] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.943725] env[61855]: DEBUG nova.network.neutron [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.954568] env[61855]: INFO nova.compute.manager [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Took 0.40 seconds to deallocate network for instance. 
[ 2037.049863] env[61855]: INFO nova.scheduler.client.report [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Deleted allocations for instance 8653615e-3254-436e-984d-e52fdfb86ce4 [ 2037.075852] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f8d5d703-7552-40c0-99c1-db215e004a54 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "8653615e-3254-436e-984d-e52fdfb86ce4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 594.823s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.076129] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "8653615e-3254-436e-984d-e52fdfb86ce4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 399.398s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.076418] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Acquiring lock "8653615e-3254-436e-984d-e52fdfb86ce4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.076891] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "8653615e-3254-436e-984d-e52fdfb86ce4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.077112] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "8653615e-3254-436e-984d-e52fdfb86ce4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.079222] env[61855]: INFO nova.compute.manager [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Terminating instance [ 2037.080919] env[61855]: DEBUG nova.compute.manager [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2037.081144] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2037.081635] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82d77ad4-20cd-4639-976e-25d0d3ab6d36 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.091007] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b7af6a-bdc0-452d-88e6-b3f7aa36c084 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.119634] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8653615e-3254-436e-984d-e52fdfb86ce4 could not be found. [ 2037.119737] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2037.119945] env[61855]: INFO nova.compute.manager [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2037.120209] env[61855]: DEBUG oslo.service.loopingcall [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2037.120440] env[61855]: DEBUG nova.compute.manager [-] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2037.120538] env[61855]: DEBUG nova.network.neutron [-] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2037.147712] env[61855]: DEBUG nova.network.neutron [-] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.155931] env[61855]: INFO nova.compute.manager [-] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] Took 0.04 seconds to deallocate network for instance. 
[ 2037.236268] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ec22431b-1125-4ad1-ad5b-213fbee0b3f4 tempest-InstanceActionsV221TestJSON-995282904 tempest-InstanceActionsV221TestJSON-995282904-project-member] Lock "8653615e-3254-436e-984d-e52fdfb86ce4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.160s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.237722] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "8653615e-3254-436e-984d-e52fdfb86ce4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 277.952s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.237722] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 8653615e-3254-436e-984d-e52fdfb86ce4] During sync_power_state the instance has a pending task (deleting). Skip. [ 2037.237722] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "8653615e-3254-436e-984d-e52fdfb86ce4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.924519] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2037.924600] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2037.924682] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2037.949098] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.949386] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.949557] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.950403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.950403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.950403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.950403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.950403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.950403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2037.950403] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2038.924942] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.924942] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.924942] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2039.923818] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2040.931535] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2042.924268] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2042.924738] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2042.938111] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.938329] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.938748] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.938748] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2042.940024] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a39e129-f521-48ee-b791-154ea89896d6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.948599] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdde48d2-95a3-4c6c-aebf-586f5e21c97e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.962186] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f30a073-a06b-42df-b60f-149b0f7336d9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.968324] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8a756d-9013-4722-a2f2-ca4eb02f8f94 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.996078] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180666MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2042.996220] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.996417] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.062983] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.063173] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.063301] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.063423] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.063543] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.063661] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.063776] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.063910] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.064020] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2043.064206] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2043.064346] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=110GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2043.170909] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5832b648-3148-44e9-a1a0-a28822b1dcd7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.178568] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb3b9a4-31e2-4123-89c6-3def630bc242 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.208966] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83274c28-6e85-4da2-8f01-d89eb7580344 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.215694] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84de648a-3871-4388-9620-37c2d4021b7a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.228221] env[61855]: DEBUG nova.compute.provider_tree 
[None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2043.236781] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2043.250630] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2043.250820] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.254s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.924209] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.924510] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2046.935422] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] There are 0 instances to clean {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2060.294068] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.113592] env[61855]: WARNING oslo_vmware.rw_handles [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 
2085.113592] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2085.113592] env[61855]: ERROR oslo_vmware.rw_handles [ 2085.114302] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2085.116284] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2085.116534] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Copying Virtual Disk [datastore2] vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/a72dc164-9f56-43bc-9ae2-6a401e99d6bc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2085.116817] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afef2890-0feb-4693-84ff-69f7c4ee167d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.124753] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 2085.124753] env[61855]: value = "task-4302978" [ 2085.124753] env[61855]: _type = "Task" [ 2085.124753] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.134607] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': task-4302978, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.634751] env[61855]: DEBUG oslo_vmware.exceptions [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2085.635059] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2085.635635] env[61855]: ERROR nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2085.635635] env[61855]: Faults: ['InvalidArgument'] [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Traceback (most recent call last): [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] yield resources [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self.driver.spawn(context, instance, image_meta, [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self._fetch_image_if_missing(context, vi) [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] image_cache(vi, tmp_image_ds_loc) [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] vm_util.copy_virtual_disk( [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] session._wait_for_task(vmdk_copy_task) [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] return self.wait_for_task(task_ref) [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] return evt.wait() [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] result = hub.switch() [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] return self.greenlet.switch() [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self.f(*self.args, **self.kw) [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] raise exceptions.translate_fault(task_info.error) [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Faults: ['InvalidArgument'] [ 2085.635635] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] [ 2085.636387] env[61855]: INFO nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Terminating instance [ 2085.638045] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2085.638045] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 
tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2085.638045] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c260779-547c-44d4-a3b5-1de7474a9972 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.640418] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2085.640611] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2085.641354] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16c20b3-6568-4581-bfcb-cc7bfdfd0623 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.648123] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2085.648339] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60ab79c9-dba7-42f1-a2c3-14029a2a9280 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.650447] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2085.650617] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2085.651553] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53dba4a8-98a3-4cca-b8c2-eacc66767d06 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.656211] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 2085.656211] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524bb54f-7f0d-f2bf-d663-587e3b3c7f31" [ 2085.656211] env[61855]: _type = "Task" [ 2085.656211] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.663847] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]524bb54f-7f0d-f2bf-d663-587e3b3c7f31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.722612] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2085.722825] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2085.723053] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Deleting the datastore file [datastore2] d7d51668-c93b-4db2-ab7c-10345258fbc7 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2085.723325] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8024b09b-7726-4b42-b4ce-a19e107579b8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.729640] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 2085.729640] env[61855]: value = "task-4302980" [ 2085.729640] env[61855]: _type = "Task" [ 2085.729640] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.737111] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': task-4302980, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.166185] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2086.166460] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2086.166691] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ce79d16-cc63-4c41-a5a1-e7ad2b1cf757 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.177954] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2086.178240] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Fetch image to [datastore2] vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2086.178440] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2086.179202] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a36c88a-ddab-4d41-b014-1cbe90c13f3f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.185806] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509075ea-4466-4dfa-b6f6-6b8a5636614d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.194829] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56376e0c-daa8-4806-9298-13a97470446e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.226499] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf4b16a-55de-4fec-9eaf-87bcb102d60e {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.971345] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-decdea09-283f-4070-9f17-a19acc72c976 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.976522] env[61855]: DEBUG oslo_vmware.api [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': task-4302980, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064304} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.977184] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2086.977379] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2086.977530] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2086.977710] env[61855]: INFO nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Took 1.34 seconds to destroy the instance on the hypervisor. 
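The destroy sequence above (FileManager.DeleteDatastoreFile_Task invoked, then the task polled until it completes) is oslo.vmware's standard invoke-then-poll pattern. A minimal sketch, assuming oslo.vmware's public API (VMwareAPISession, invoke_api, wait_for_task); the endpoint, credentials, retry settings, and datastore path are placeholders, not values from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials, not values from this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    datacenter = vim_util.get_moref('ha-datacenter', 'Datacenter')

    # The *_Task call returns immediately with a Task moref;
    # wait_for_task() then polls TaskInfo (the "progress is 0%" lines)
    # until the state is 'success', or raises the translated fault.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] d7d51668-c93b-4db2-ab7c-10345258fbc7',
        datacenter=datacenter)
    session.wait_for_task(task)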
[ 2086.979960] env[61855]: DEBUG nova.compute.claims [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2086.980179] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.980412] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.998308] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2087.056745] env[61855]: DEBUG oslo_vmware.rw_handles [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2087.117790] env[61855]: DEBUG oslo_vmware.rw_handles [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2087.119934] env[61855]: DEBUG oslo_vmware.rw_handles [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2087.214921] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a04cc4-6c41-4d7a-82a6-5ed72e765244 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.222867] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df241fe-9d95-487b-ba01-25248b7a9d2d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.252900] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0863c646-3217-4666-82cf-e569d48cdcf0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.259676] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6caa08a-ffd3-42df-a49c-a12c1da45657 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.272536] env[61855]: DEBUG nova.compute.provider_tree [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2087.280953] env[61855]: DEBUG nova.scheduler.client.report [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2087.294065] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.314s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.294586] env[61855]: ERROR nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.294586] env[61855]: Faults: ['InvalidArgument'] [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Traceback (most recent call last): [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2087.294586] 
env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self.driver.spawn(context, instance, image_meta, [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self._fetch_image_if_missing(context, vi) [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] image_cache(vi, tmp_image_ds_loc) [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] vm_util.copy_virtual_disk( [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] session._wait_for_task(vmdk_copy_task) [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] return self.wait_for_task(task_ref) [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] return evt.wait() [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] result = hub.switch() [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] return self.greenlet.switch() [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] self.f(*self.args, **self.kw) [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] raise exceptions.translate_fault(task_info.error) [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Faults: ['InvalidArgument'] [ 2087.294586] env[61855]: ERROR nova.compute.manager [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] [ 2087.295405] env[61855]: DEBUG nova.compute.utils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2087.296624] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Build of instance d7d51668-c93b-4db2-ab7c-10345258fbc7 was re-scheduled: A specified parameter was not correct: fileType [ 2087.296624] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2087.297026] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2087.297234] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2087.297414] env[61855]: DEBUG nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2087.297578] env[61855]: DEBUG nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2087.645542] env[61855]: DEBUG nova.network.neutron [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2087.658656] env[61855]: INFO nova.compute.manager [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Took 0.36 seconds to deallocate network for instance. [ 2087.771740] env[61855]: INFO nova.scheduler.client.report [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Deleted allocations for instance d7d51668-c93b-4db2-ab7c-10345258fbc7 [ 2087.794013] env[61855]: DEBUG oslo_concurrency.lockutils [None req-032cbb6c-de13-45d2-9bec-b8e04e6d1652 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.651s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.794314] env[61855]: DEBUG oslo_concurrency.lockutils [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 420.742s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.794542] env[61855]: DEBUG oslo_concurrency.lockutils [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "d7d51668-c93b-4db2-ab7c-10345258fbc7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.794757] env[61855]: DEBUG oslo_concurrency.lockutils [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.795150] env[61855]: DEBUG oslo_concurrency.lockutils [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.797388] env[61855]: INFO nova.compute.manager [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Terminating instance [ 2087.799434] env[61855]: DEBUG nova.compute.manager [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2087.799629] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2087.800158] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15b80ec8-6017-40bf-920f-9aafbfb37d6d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.808804] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b580dee-ab6e-45d8-a5ab-d027cc094429 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.836041] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7d51668-c93b-4db2-ab7c-10345258fbc7 could not be found. [ 2087.836256] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2087.836435] env[61855]: INFO nova.compute.manager [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2087.836678] env[61855]: DEBUG oslo.service.loopingcall [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2087.836907] env[61855]: DEBUG nova.compute.manager [-] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2087.837011] env[61855]: DEBUG nova.network.neutron [-] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2087.860264] env[61855]: DEBUG nova.network.neutron [-] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2087.868263] env[61855]: INFO nova.compute.manager [-] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] Took 0.03 seconds to deallocate network for instance. [ 2087.957140] env[61855]: DEBUG oslo_concurrency.lockutils [None req-74258f04-c82b-4831-965b-3becbe9be7f1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.163s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.957938] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 328.673s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.958173] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: d7d51668-c93b-4db2-ab7c-10345258fbc7] During sync_power_state the instance has a pending task (deleting). Skip. 
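The "Running periodic task ComputeManager._..." records that follow (_check_instance_build_time, _poll_volume_usage, _heal_instance_info_cache, and so on) are driven by oslo.service's periodic task machinery, which ComputeManager inherits. A minimal sketch, assuming the public oslo.service API is used directly; the manager class and spacing are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        # Illustrative task and spacing, not Nova's real configuration.
        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Invoked at most once per 'spacing' seconds each time the
            # host service calls run_periodic_tasks(); each run emits a
            # 'Running periodic task ...' DEBUG record like those below.
            pass

    mgr = Manager(cfg.CONF)
    mgr.run_periodic_tasks(None)  # a real service passes a request context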
[ 2087.958373] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "d7d51668-c93b-4db2-ab7c-10345258fbc7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.931123] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2095.924229] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.925062] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.925062] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2098.925062] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2098.944617] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.944811] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.944967] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.945086] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.945209] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.945338] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.945459] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.945580] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2098.945703] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2098.946211] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.946403] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.946563] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.946695] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2100.925461] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2102.920618] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.924651] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.924965] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2104.937362] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.937582] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.937747] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.937902] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2104.939031] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ec6fab-a046-444f-9880-53f7a252606a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.948364] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a33085-32e8-4751-8756-dfa6203ed292 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.961763] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb93b88-d1a6-4be1-bf3a-ebc9e95a0256 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.967886] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdb57a3-0114-4ac2-b7a8-87456aefdde7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.995904] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180665MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2104.996062] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.996258] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.057950] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.058117] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.058249] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.058372] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.058492] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.058610] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.058728] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.058844] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2105.059032] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2105.059172] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=110GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2105.073789] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing inventories for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2105.086045] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating ProviderTree inventory for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2105.086220] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2105.095986] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing aggregate associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, aggregates: None {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2105.112678] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing trait associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2105.210375] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4bb58c-7e55-4e5b-9808-cc450ae93c56 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.218051] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e186f63-2ea4-41e0-a32b-b4d4f00a2247 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.247798] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9892043d-5a2e-4cc2-8f04-be0dda7dedb1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.254477] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9cb2e9-3a8a-4406-b199-abea5aaed803 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.266800] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2105.275107] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2105.291130] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2105.291327] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.295s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2135.713371] env[61855]: WARNING oslo_vmware.rw_handles [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2135.713371] env[61855]: ERROR oslo_vmware.rw_handles [ 2135.714180] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2135.715790] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2135.716058] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Copying Virtual Disk [datastore2] vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/cce78a0e-3923-4509-9fa5-b63f59d7351a/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2135.716372] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f0ec03c-efe0-4a30-906d-a0e52d816297 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.724500] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 2135.724500] env[61855]: value = "task-4302981" [ 2135.724500] env[61855]: _type = "Task" [ 2135.724500] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.731972] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302981, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.234495] env[61855]: DEBUG oslo_vmware.exceptions [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2136.234754] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2136.235357] env[61855]: ERROR nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2136.235357] env[61855]: Faults: ['InvalidArgument'] [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Traceback (most recent call last): [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] yield resources [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self.driver.spawn(context, instance, image_meta, [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self._fetch_image_if_missing(context, vi) [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] image_cache(vi, tmp_image_ds_loc) [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] vm_util.copy_virtual_disk( [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] session._wait_for_task(vmdk_copy_task) [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] return self.wait_for_task(task_ref) [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] return evt.wait() [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] result = hub.switch() [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] return self.greenlet.switch() [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self.f(*self.args, **self.kw) [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] raise exceptions.translate_fault(task_info.error) [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Faults: ['InvalidArgument'] [ 2136.235357] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] [ 2136.236209] env[61855]: INFO nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Terminating instance [ 2136.237436] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.237650] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2136.237916] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cbe33d1-2393-4f33-9221-a1734e7bec62 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.240362] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2136.240569] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2136.241312] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29901a80-fb49-43b4-8788-8277b189388b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.247469] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2136.247671] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c64b5d95-3497-4fab-8f40-c9d7521ac317 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.249736] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2136.249911] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2136.250856] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a84e1d9-8f1f-41b5-828c-d1d58d73dd0a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.255356] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 2136.255356] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527b0516-d805-eb1e-5bcd-cc1896fc8e74" [ 2136.255356] env[61855]: _type = "Task" [ 2136.255356] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.261932] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527b0516-d805-eb1e-5bcd-cc1896fc8e74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.320165] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2136.320388] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2136.320572] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleting the datastore file [datastore2] feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2136.320841] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7fc0f05-0ed7-4c48-a681-a6c90c4da964 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.326484] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 2136.326484] env[61855]: value = "task-4302983" [ 2136.326484] env[61855]: _type = "Task" [ 2136.326484] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.333766] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302983, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.766900] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2136.767246] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating directory with path [datastore2] vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2136.767371] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84b066f4-e527-49ec-8906-33c8859fdaed {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.779553] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created directory with path [datastore2] vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2136.779797] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Fetch image to [datastore2] vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2136.779996] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2136.781046] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe4a55e-864d-472b-90d6-aa4b3d55802d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.787959] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c407da3-e869-4616-90a1-f55187520773 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.796625] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e400b26-1d71-483b-b279-bc97251b70e6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.826044] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a13e84-9c7c-4d2e-9029-aa081f988d35 {{(pid=61855) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.836284] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6ac55b06-2b37-40ce-807b-a8958c39c7ab {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.837898] env[61855]: DEBUG oslo_vmware.api [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4302983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067607} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.838147] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2136.838338] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2136.838528] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2136.838710] env[61855]: INFO nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Took 0.60 seconds to destroy the instance on the hypervisor. 
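The DeleteDatastoreFile_Task records above show the poll-until-done loop that oslo.vmware's wait_for_task drives: a vSphere task is submitted, then its state is fetched repeatedly ("progress is 0%") until it reports success or error. Below is a minimal sketch of that pattern in plain Python; TaskInfo and the poll callable are hypothetical stand-ins for the PropertyCollector round-trips in the log, not oslo.vmware's actual API.

    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        state: str                 # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0          # matches the "progress is 0%" records above
        error: Optional[str] = None

    def wait_for_task(poll: Callable[[], TaskInfo],
                      interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a vCenter-style task until it finishes or the deadline passes."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # oslo.vmware raises a translated fault at this point (see the
                # VimFaultException traceback later in this log); the sketch
                # just raises a plain exception.
                raise RuntimeError(info.error)
            time.sleep(interval)
        raise TimeoutError('task did not complete before timeout')

In the real driver the loop body runs inside a looping call on an eventlet hub, which is why the traceback below passes through loopingcall.py and hub.switch() rather than a bare sleep.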
[ 2136.840893] env[61855]: DEBUG nova.compute.claims [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2136.841097] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.841313] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.860221] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2136.913312] env[61855]: DEBUG oslo_vmware.rw_handles [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2136.970958] env[61855]: DEBUG oslo_vmware.rw_handles [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2136.971161] env[61855]: DEBUG oslo_vmware.rw_handles [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2137.041770] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a9edf2-2737-452b-956b-7aba3528c562 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.049275] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691feb89-b305-4efb-b233-ce9717bd4e13 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.080249] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886549f1-bbfe-4be4-9c92-6bd047f41466 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.087010] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ec33b6-5355-426e-9ff7-bc001e87ba59 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.099765] env[61855]: DEBUG nova.compute.provider_tree [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2137.108513] env[61855]: DEBUG nova.scheduler.client.report [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2137.121620] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.280s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.122153] env[61855]: ERROR nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2137.122153] env[61855]: Faults: ['InvalidArgument'] [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Traceback (most recent call last): [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: 
feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self.driver.spawn(context, instance, image_meta, [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self._fetch_image_if_missing(context, vi) [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] image_cache(vi, tmp_image_ds_loc) [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] vm_util.copy_virtual_disk( [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] session._wait_for_task(vmdk_copy_task) [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] return self.wait_for_task(task_ref) [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] return evt.wait() [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] result = hub.switch() [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] return self.greenlet.switch() [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] self.f(*self.args, **self.kw) [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] raise exceptions.translate_fault(task_info.error) [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Faults: ['InvalidArgument'] [ 2137.122153] env[61855]: ERROR nova.compute.manager [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] [ 2137.122954] env[61855]: DEBUG nova.compute.utils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2137.124326] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Build of instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 was re-scheduled: A specified parameter was not correct: fileType [ 2137.124326] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2137.124697] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2137.124872] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2137.125058] env[61855]: DEBUG nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2137.125226] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2137.406766] env[61855]: DEBUG nova.network.neutron [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.417352] env[61855]: INFO nova.compute.manager [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Took 0.29 seconds to deallocate network for instance. [ 2137.518321] env[61855]: INFO nova.scheduler.client.report [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted allocations for instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 [ 2137.563971] env[61855]: DEBUG oslo_concurrency.lockutils [None req-933b01bd-a529-448f-8469-deaa93b478f5 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 553.605s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.564273] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 378.279s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2137.564476] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] During sync_power_state the instance has a pending task (spawning). Skip. 
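The lockutils records throughout this trace all follow one shape: a named lock is requested, the wait time is logged on acquisition, and the hold time is logged on release (for example the 378.279-second wait above, where the power-state sync queued behind the 553-second build lock on the same instance UUID). A simplified sketch of that bookkeeping follows, assuming in-process threading locks only; real oslo.concurrency additionally supports fair locks, semaphores, and file-based external locks, so this is illustrative rather than its implementation.

    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    _LOCKS: dict[str, threading.Lock] = {}   # registry of named locks
    _GUARD = threading.Lock()                # protects the registry itself

    @contextmanager
    def named_lock(name: str, owner: str):
        with _GUARD:
            lock = _LOCKS.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, owner, waited)
        try:
            yield
        finally:
            held = time.monotonic() - start - waited
            lock.release()
            LOG.debug('Lock "%s" released by "%s" :: held %.3fs',
                      name, owner, held)

Serializing on a coarse name like "compute_resources" is what makes the resource-tracker claim, abort, and audit operations later in this log strictly ordered on the host.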
[ 2137.564674] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.565229] env[61855]: DEBUG oslo_concurrency.lockutils [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 357.364s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2137.565496] env[61855]: DEBUG oslo_concurrency.lockutils [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2137.565712] env[61855]: DEBUG oslo_concurrency.lockutils [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2137.565880] env[61855]: DEBUG oslo_concurrency.lockutils [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.567901] env[61855]: INFO nova.compute.manager [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Terminating instance [ 2137.569692] env[61855]: DEBUG nova.compute.manager [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2137.569892] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2137.570171] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8509e188-d774-4038-95eb-d7d3c2b2adeb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.580160] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8daad41e-1792-4290-a6c7-b36173f0d2c1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.607808] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance feaec64e-abbd-438a-b1e6-a1ae2c0f2b80 could not be found. [ 2137.608026] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2137.608220] env[61855]: INFO nova.compute.manager [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2137.608506] env[61855]: DEBUG oslo.service.loopingcall [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2137.608780] env[61855]: DEBUG nova.compute.manager [-] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2137.608883] env[61855]: DEBUG nova.network.neutron [-] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2137.637222] env[61855]: DEBUG nova.network.neutron [-] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.645645] env[61855]: INFO nova.compute.manager [-] [instance: feaec64e-abbd-438a-b1e6-a1ae2c0f2b80] Took 0.04 seconds to deallocate network for instance. 
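The terminate path above is idempotent: the backend VM was already removed during the earlier destroy, so vmops logs the WARNING "Instance does not exist on backend", treats InstanceNotFound as success, and still proceeds to deallocate networking. A sketch of that tolerance, using hypothetical helper names rather than Nova's actual signatures:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(uuid: str, unregister_vm, deallocate_network) -> None:
        """Destroy on the hypervisor, treating 'already gone' as success,
        then always release networking, mirroring the ordering in the log."""
        try:
            unregister_vm(uuid)
        except InstanceNotFound:
            # Matches the WARNING record above: for a delete, absence on the
            # backend is not an error; cleanup continues regardless.
            print(f'Instance {uuid} does not exist on backend; continuing')
        deallocate_network(uuid)

    # Usage with stubs: the backend VM is already gone, as in the log.
    def _missing(uuid):
        raise InstanceNotFound(uuid)

    destroy_instance('feaec64e-abbd-438a-b1e6-a1ae2c0f2b80', _missing,
                     lambda uuid: print(f'deallocated network for {uuid}'))

Swallowing the not-found error is what lets the second terminate request release the instance lock after only 0.182s instead of failing the delete.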
[ 2137.747172] env[61855]: DEBUG oslo_concurrency.lockutils [None req-57e8d09b-0708-4cb2-a9a2-9a1ab6ab024e tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "feaec64e-abbd-438a-b1e6-a1ae2c0f2b80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.287029] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2157.925155] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2158.925187] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2159.924410] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2160.924758] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2160.925169] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2160.925169] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2160.944233] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2160.944409] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2160.944520] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2160.944657] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2160.944781] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2160.944901] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2160.945085] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2160.945161] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2160.945710] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2160.945854] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2162.924411] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2164.925148] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2164.936345] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.936569] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2164.936740] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2164.936895] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2164.938393] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20758897-9632-408e-ae9f-67e929e41cb3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.946777] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e10958-eda4-4203-a008-acf36c09120d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.960271] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e9663b-531e-4502-b7d1-16a15ed6f4b8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.966343] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c262a7c-945d-4236-8b60-49b543f3d238 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.995357] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180674MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2164.995500] env[61855]: DEBUG 
oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.995693] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.060660] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.060830] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.060959] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.061096] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.061218] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.061338] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.061455] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.061631] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2165.061777] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=110GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2165.150319] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328cc3b9-a19e-4b17-9fc9-3261dffc8a77 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.157631] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebd6681-6923-413f-bdb3-2ac7d0f9acc7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.186995] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1db436c-571e-4535-889b-cd89f5c675c0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.194151] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a9d9cc-808f-4d7a-8750-a0cc47283d99 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.206785] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2165.215107] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2165.228341] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2165.228525] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.233s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2166.228248] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2174.179788] env[61855]: DEBUG oslo_concurrency.lockutils [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.053601] env[61855]: DEBUG oslo_concurrency.lockutils [None req-f85fca9a-e106-4368-95d9-128bc14e0f1f tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "aa42a06f-c679-4530-8762-15ea8dc35d9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.712822] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "bb506aaf-a599-4619-8035-a60952973f0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.713122] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "bb506aaf-a599-4619-8035-a60952973f0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.723298] env[61855]: DEBUG nova.compute.manager [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2181.775902] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.776157] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.779561] env[61855]: INFO nova.compute.claims [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2181.921252] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186f5086-5048-44ca-aa04-76f4ee62776d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.929571] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e162ab60-42d2-4dc7-8b2d-53e2115debd4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.960829] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a7fe5e-3f23-40b6-8698-5a82b0ee6f44 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.967503] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a978b0-3198-4a16-945f-b974abcc9705 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.980735] env[61855]: DEBUG nova.compute.provider_tree [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2181.989180] env[61855]: DEBUG nova.scheduler.client.report [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2182.002603] env[61855]: DEBUG oslo_concurrency.lockutils 
[None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.226s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.003099] env[61855]: DEBUG nova.compute.manager [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2182.033782] env[61855]: DEBUG nova.compute.utils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2182.035183] env[61855]: DEBUG nova.compute.manager [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2182.035371] env[61855]: DEBUG nova.network.neutron [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2182.045993] env[61855]: DEBUG nova.compute.manager [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2182.096334] env[61855]: DEBUG nova.policy [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24e7161122834e938ca6156e3f8c2855', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1eb57982dd094432baccff494449adad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 2182.107685] env[61855]: DEBUG nova.compute.manager [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2182.131806] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2182.132068] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2182.132244] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2182.132434] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2182.132585] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2182.132738] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2182.132951] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2182.133134] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2182.133304] env[61855]: DEBUG 
nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2182.133493] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2182.133643] env[61855]: DEBUG nova.virt.hardware [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2182.134549] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d638bac-7976-4f10-8821-a5434e107ac7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.142217] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9d5fca-1898-4953-ac6a-aca0464e2415 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.437954] env[61855]: DEBUG nova.network.neutron [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Successfully created port: c0620ab1-70bb-4896-8fc2-16ff441d03ba {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2183.004873] env[61855]: DEBUG nova.compute.manager [req-6190be31-32e0-4767-8321-3110f1d5e367 req-d0b86bda-bfb8-46db-be33-a92f22d75103 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Received event network-vif-plugged-c0620ab1-70bb-4896-8fc2-16ff441d03ba {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2183.005177] env[61855]: DEBUG oslo_concurrency.lockutils [req-6190be31-32e0-4767-8321-3110f1d5e367 req-d0b86bda-bfb8-46db-be33-a92f22d75103 service nova] Acquiring lock "bb506aaf-a599-4619-8035-a60952973f0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.005322] env[61855]: DEBUG oslo_concurrency.lockutils [req-6190be31-32e0-4767-8321-3110f1d5e367 req-d0b86bda-bfb8-46db-be33-a92f22d75103 service nova] Lock "bb506aaf-a599-4619-8035-a60952973f0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.005491] env[61855]: DEBUG oslo_concurrency.lockutils [req-6190be31-32e0-4767-8321-3110f1d5e367 req-d0b86bda-bfb8-46db-be33-a92f22d75103 service nova] Lock "bb506aaf-a599-4619-8035-a60952973f0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.005657] env[61855]: DEBUG 
nova.compute.manager [req-6190be31-32e0-4767-8321-3110f1d5e367 req-d0b86bda-bfb8-46db-be33-a92f22d75103 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] No waiting events found dispatching network-vif-plugged-c0620ab1-70bb-4896-8fc2-16ff441d03ba {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2183.006312] env[61855]: WARNING nova.compute.manager [req-6190be31-32e0-4767-8321-3110f1d5e367 req-d0b86bda-bfb8-46db-be33-a92f22d75103 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Received unexpected event network-vif-plugged-c0620ab1-70bb-4896-8fc2-16ff441d03ba for instance with vm_state building and task_state spawning. [ 2183.081487] env[61855]: DEBUG nova.network.neutron [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Successfully updated port: c0620ab1-70bb-4896-8fc2-16ff441d03ba {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2183.096115] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "refresh_cache-bb506aaf-a599-4619-8035-a60952973f0c" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2183.096264] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "refresh_cache-bb506aaf-a599-4619-8035-a60952973f0c" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.096415] env[61855]: DEBUG nova.network.neutron [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2183.136783] env[61855]: DEBUG nova.network.neutron [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2183.296611] env[61855]: DEBUG nova.network.neutron [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Updating instance_info_cache with network_info: [{"id": "c0620ab1-70bb-4896-8fc2-16ff441d03ba", "address": "fa:16:3e:94:c9:9a", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0620ab1-70", "ovs_interfaceid": "c0620ab1-70bb-4896-8fc2-16ff441d03ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.307920] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "refresh_cache-bb506aaf-a599-4619-8035-a60952973f0c" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.308238] env[61855]: DEBUG nova.compute.manager [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Instance network_info: |[{"id": "c0620ab1-70bb-4896-8fc2-16ff441d03ba", "address": "fa:16:3e:94:c9:9a", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0620ab1-70", "ovs_interfaceid": "c0620ab1-70bb-4896-8fc2-16ff441d03ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2183.308637] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:c9:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0620ab1-70bb-4896-8fc2-16ff441d03ba', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2183.316331] env[61855]: DEBUG oslo.service.loopingcall [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2183.316788] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2183.317033] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfcebc4b-794a-4365-9422-1a2cfd5fbe9a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.341618] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2183.341618] env[61855]: value = "task-4302984" [ 2183.341618] env[61855]: _type = "Task" [ 2183.341618] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.353452] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302984, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.852556] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302984, 'name': CreateVM_Task, 'duration_secs': 0.276334} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.852979] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2183.853627] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2183.853796] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.854138] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2183.854382] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e2f09cf-05cf-4aec-b15f-7cdb04898a6a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.858567] env[61855]: DEBUG oslo_vmware.api [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 2183.858567] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]520c1f7f-e198-a009-4974-f653d1445c7c" [ 2183.858567] env[61855]: _type = "Task" [ 2183.858567] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.865585] env[61855]: DEBUG oslo_vmware.api [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]520c1f7f-e198-a009-4974-f653d1445c7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.144983] env[61855]: WARNING oslo_vmware.rw_handles [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2184.144983] env[61855]: ERROR oslo_vmware.rw_handles [ 2184.145685] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2184.147469] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2184.147773] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Copying Virtual Disk [datastore2] vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/33617e72-2e17-4f05-8b6c-3dfd6f8f140f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2184.148174] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5d722df-2daa-444d-8013-e5db39bacc2a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.155785] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: 
(returnval){ [ 2184.155785] env[61855]: value = "task-4302985" [ 2184.155785] env[61855]: _type = "Task" [ 2184.155785] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.164177] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': task-4302985, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.369436] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.369686] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2184.369896] env[61855]: DEBUG oslo_concurrency.lockutils [None req-73f0b34e-6218-4aa3-9d32-6cf11cd97dd5 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.665932] env[61855]: DEBUG oslo_vmware.exceptions [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2184.666223] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.666810] env[61855]: ERROR nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.666810] env[61855]: Faults: ['InvalidArgument'] [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Traceback (most recent call last): [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] yield resources [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self.driver.spawn(context, instance, image_meta, [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self._fetch_image_if_missing(context, vi) [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] image_cache(vi, tmp_image_ds_loc) [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] vm_util.copy_virtual_disk( [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] session._wait_for_task(vmdk_copy_task) [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] return self.wait_for_task(task_ref) [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] return evt.wait() [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] result = hub.switch() [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] return self.greenlet.switch() [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self.f(*self.args, **self.kw) [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] raise exceptions.translate_fault(task_info.error) [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Faults: ['InvalidArgument'] [ 2184.666810] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] [ 2184.667759] env[61855]: INFO nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Terminating instance [ 2184.668768] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.669031] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2184.669288] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6127c24d-2c62-4847-bacd-dcfb8770478d {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.671393] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2184.671591] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2184.672333] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911a107a-e322-4ce8-a437-dec99b967830 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.679238] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2184.679482] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0276593f-4b0e-4a9f-91b2-b43d503f07eb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.681556] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2184.681731] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2184.682695] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d736533-af20-4e38-b071-f2ec7f1bf93c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.687322] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 2184.687322] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52598efa-8c58-5525-4969-ab842349c724" [ 2184.687322] env[61855]: _type = "Task" [ 2184.687322] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.694362] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52598efa-8c58-5525-4969-ab842349c724, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.735345] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2184.735567] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2184.735777] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Deleting the datastore file [datastore2] ba293405-d3ea-4a1d-b21d-c44bff58dcb6 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2184.736072] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-051f0364-3e37-4340-b508-88ff835c8809 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.741867] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 2184.741867] env[61855]: value = "task-4302987" [ 2184.741867] env[61855]: _type = "Task" [ 2184.741867] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.749357] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': task-4302987, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.031363] env[61855]: DEBUG nova.compute.manager [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Received event network-changed-c0620ab1-70bb-4896-8fc2-16ff441d03ba {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2185.031557] env[61855]: DEBUG nova.compute.manager [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Refreshing instance network info cache due to event network-changed-c0620ab1-70bb-4896-8fc2-16ff441d03ba. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2185.031770] env[61855]: DEBUG oslo_concurrency.lockutils [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] Acquiring lock "refresh_cache-bb506aaf-a599-4619-8035-a60952973f0c" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2185.031916] env[61855]: DEBUG oslo_concurrency.lockutils [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] Acquired lock "refresh_cache-bb506aaf-a599-4619-8035-a60952973f0c" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2185.032355] env[61855]: DEBUG nova.network.neutron [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Refreshing network info cache for port c0620ab1-70bb-4896-8fc2-16ff441d03ba {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2185.197709] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2185.198078] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating directory with path [datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2185.198318] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8d9ca96-04ec-48a3-baf8-ed2b1dd054bc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.210034] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created directory with path [datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2185.210316] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Fetch image to [datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2185.210492] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2185.211262] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6534e979-f3b4-4bcd-ad5f-c3216d89fbeb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.217699] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab17c4c-61b0-43a0-8a02-34934de1b420 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.227015] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aade1206-94e4-4fc6-a6c7-60f27ed24ef7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.262616] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afe4425-5a18-477b-a870-648dab075cb0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.271161] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3b974979-a8e5-492c-9ea5-2a4c3f11480f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.272787] env[61855]: DEBUG oslo_vmware.api [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': task-4302987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068375} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.273036] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2185.273223] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2185.273394] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2185.273563] env[61855]: INFO nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Took 0.60 seconds to destroy the instance on the hypervisor. 
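The "Waiting for the task" / "progress is 0%" / "completed successfully" sequences above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) are all produced by oslo.vmware's task poller: wait_for_task() blocks on a looping call that re-reads TaskInfo until the task succeeds, or raises the translated fault if task_info.error is set. A minimal sketch of driving the same machinery directly; the host, credentials, and datastore path are illustrative placeholders, not values from this run:

    from oslo_vmware import api

    # Session setup mirrors VMwareAPISession._create_session seen earlier in
    # this log; the argument values here are assumptions for illustration.
    session = api.VMwareAPISession(
        'vc.example.invalid', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() issues the SOAP call and returns a task moref;
    # wait_for_task() polls TaskInfo (the "progress is N%" lines above).
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] some/instance-dir',  # hypothetical path
        datacenter=None)  # a real call passes the Datacenter moref
    session.wait_for_task(task)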
[ 2185.275640] env[61855]: DEBUG nova.compute.claims [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2185.275839] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.276079] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.296371] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2185.311613] env[61855]: DEBUG nova.network.neutron [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Updated VIF entry in instance network info cache for port c0620ab1-70bb-4896-8fc2-16ff441d03ba. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2185.311986] env[61855]: DEBUG nova.network.neutron [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Updating instance_info_cache with network_info: [{"id": "c0620ab1-70bb-4896-8fc2-16ff441d03ba", "address": "fa:16:3e:94:c9:9a", "network": {"id": "85e06a61-2c99-4e9b-bb42-6580ff356d9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1370759918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1eb57982dd094432baccff494449adad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0620ab1-70", "ovs_interfaceid": "c0620ab1-70bb-4896-8fc2-16ff441d03ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.326537] env[61855]: DEBUG oslo_concurrency.lockutils [req-cd3869f4-7f6d-4a0c-8210-5eeb7e47fbdc req-1f1791b5-3e0a-4726-b20d-e233758b5088 service nova] Releasing lock "refresh_cache-bb506aaf-a599-4619-8035-a60952973f0c" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.353381] env[61855]: DEBUG oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2185.413154] env[61855]: DEBUG oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2185.413401] env[61855]: DEBUG oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2185.477677] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e78339-232b-414e-ba36-9139e040bfbe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.485476] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfd77a2-065f-4d52-965d-30d9338c50d5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.515054] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bef01d-488b-4bf5-89ff-88b64d402587 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.521869] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcbfd61-7fe7-4e71-afed-c2b8ec9de526 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.535362] env[61855]: DEBUG nova.compute.provider_tree [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2185.545290] env[61855]: DEBUG nova.scheduler.client.report [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2185.559992] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.284s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.560578] env[61855]: ERROR nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2185.560578] env[61855]: Faults: ['InvalidArgument'] [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Traceback (most recent call last): [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2185.560578] env[61855]: ERROR 
nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self.driver.spawn(context, instance, image_meta, [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self._fetch_image_if_missing(context, vi) [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] image_cache(vi, tmp_image_ds_loc) [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] vm_util.copy_virtual_disk( [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] session._wait_for_task(vmdk_copy_task) [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] return self.wait_for_task(task_ref) [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] return evt.wait() [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] result = hub.switch() [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] return self.greenlet.switch() [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] self.f(*self.args, **self.kw) [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] raise exceptions.translate_fault(task_info.error) [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Faults: ['InvalidArgument'] [ 2185.560578] env[61855]: ERROR nova.compute.manager [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] [ 2185.561417] env[61855]: DEBUG nova.compute.utils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2185.562714] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Build of instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 was re-scheduled: A specified parameter was not correct: fileType [ 2185.562714] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2185.563101] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2185.563288] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2185.563460] env[61855]: DEBUG nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2185.563624] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2185.884896] env[61855]: DEBUG nova.network.neutron [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.896568] env[61855]: INFO nova.compute.manager [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Took 0.33 seconds to deallocate network for instance. [ 2186.024813] env[61855]: INFO nova.scheduler.client.report [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Deleted allocations for instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 [ 2186.047487] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d0e85d96-716c-4c68-911f-1006912e833c tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 600.371s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.048107] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 426.762s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.048183] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] During sync_power_state the instance has a pending task (spawning). Skip. 
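The spawn failure above surfaces as a generic VimFaultException because "Fault InvalidArgument not matched" means oslo.vmware has no dedicated exception class for that fault; the raw fault names ride along in fault_list (hence "Faults: ['InvalidArgument']"), and _build_and_run_instance reacts by aborting the resource claim and re-scheduling the build. A hedged sketch of inspecting that exception; the function is illustrative and vmdk_copy_task stands in for the task reference from vm_util.copy_virtual_disk:

    from oslo_vmware import exceptions as vexc

    def copy_disk_checked(session, vmdk_copy_task):
        """Wait on a CopyVirtualDisk_Task, classifying the fault seen above."""
        try:
            session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as e:
            # e.fault_list holds the raw VIM fault names, e.g.
            # ['InvalidArgument']; str(e) carries the message
            # "A specified parameter was not correct: fileType".
            if 'InvalidArgument' in e.fault_list:
                raise  # Nova aborts the claim and re-schedules the build
            raise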
[ 2186.048368] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.048876] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 404.221s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.049126] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.049359] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.049538] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.052195] env[61855]: INFO nova.compute.manager [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Terminating instance [ 2186.054805] env[61855]: DEBUG nova.compute.manager [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2186.055105] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2186.055396] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e68e7ad2-fdef-42df-b6b9-bad152cd6d2f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.065076] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8292d6f1-87e7-48a5-8bd0-ea0f4e9524bb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.093757] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ba293405-d3ea-4a1d-b21d-c44bff58dcb6 could not be found. [ 2186.093985] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2186.094187] env[61855]: INFO nova.compute.manager [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2186.094446] env[61855]: DEBUG oslo.service.loopingcall [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2186.094678] env[61855]: DEBUG nova.compute.manager [-] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2186.094770] env[61855]: DEBUG nova.network.neutron [-] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2186.121229] env[61855]: DEBUG nova.network.neutron [-] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2186.132364] env[61855]: INFO nova.compute.manager [-] [instance: ba293405-d3ea-4a1d-b21d-c44bff58dcb6] Took 0.04 seconds to deallocate network for instance. 
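The destroy path above is deliberately tolerant of a missing backend VM: SearchIndex.FindAllByUuid finds nothing, the driver logs InstanceNotFound as a WARNING, treats the instance as destroyed, and network deallocation still runs, so a delete converges even when the spawn never produced a VM. A self-contained sketch of that error-tolerance pattern (the classes below are stand-ins, not Nova's actual code):

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_on_backend(uuid):
    # Stand-in for the vSphere lookup (SearchIndex.FindAllByUuid above);
    # pretend the VM is already gone, as in the records above.
    raise InstanceNotFound(uuid)

def terminate(uuid):
    try:
        destroy_on_backend(uuid)
    except InstanceNotFound:
        pass  # logged as a WARNING above, then treated as "Instance destroyed"
    # network deallocation proceeds regardless ("Deallocating network for instance")

terminate('ba293405-d3ea-4a1d-b21d-c44bff58dcb6')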
[ 2186.229076] env[61855]: DEBUG oslo_concurrency.lockutils [None req-8c06c0e2-7e1e-42ff-81d0-c02f161f4c17 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "ba293405-d3ea-4a1d-b21d-c44bff58dcb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.919691] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2219.923624] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2220.924785] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2221.924586] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.924574] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.926069] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2222.926069] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2222.944428] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2222.944582] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2222.944719] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2222.944905] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2222.944997] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2222.945238] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2222.945386] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2222.945538] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2222.946045] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.946264] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2223.924821] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2225.924476] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2225.936673] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2225.936898] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2225.937082] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2225.937245] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2225.938424] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5136b5-ae5e-47dc-bd87-c5ddec60f050 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.947011] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72720b9d-44db-4413-8a57-515f6c60206b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.960322] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7521d35c-87f5-4dc9-82fe-49a7f085877a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.966129] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bd4991-0079-4dbc-913d-af24a8266dd9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.995634] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180679MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2225.995779] env[61855]: DEBUG 
oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2225.995967] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.059850] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e49e5769-b561-48e0-9009-21e71844238f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.060019] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance c63748b0-5d55-447f-a11d-f0704edc1e86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.060155] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.060280] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.060401] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.060522] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.060638] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bb506aaf-a599-4619-8035-a60952973f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2226.060816] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2226.060954] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=110GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2226.142748] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082c93d3-36e0-4e75-b8ee-9c4b6301bbee {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.150232] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d743e28-dc6b-4c83-b1b5-deee742bcb23 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.179479] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63de32a-079f-4b6c-8277-75d2c66ecf67 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.186084] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739eddc2-dcb4-4974-8caa-bf419c1abd46 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.198422] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2226.206396] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2226.220467] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2226.220658] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.225s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.215590] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.923665] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.166039] env[61855]: WARNING oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2235.166039] env[61855]: ERROR oslo_vmware.rw_handles [ 2235.166039] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2235.167502] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2235.167771] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Copying Virtual Disk [datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2235.168081] env[61855]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd3a8717-ce18-4305-ae4b-a0639a0e5f9e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.179390] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 2235.179390] env[61855]: value = "task-4302988" [ 2235.179390] env[61855]: _type = "Task" [ 2235.179390] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.187569] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.689307] env[61855]: DEBUG oslo_vmware.exceptions [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2235.689583] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2235.690167] env[61855]: ERROR nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.690167] env[61855]: Faults: ['InvalidArgument'] [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] Traceback (most recent call last): [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] yield resources [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self.driver.spawn(context, instance, image_meta, [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self._fetch_image_if_missing(context, vi) [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] image_cache(vi, tmp_image_ds_loc) [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] vm_util.copy_virtual_disk( [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] session._wait_for_task(vmdk_copy_task) [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] return self.wait_for_task(task_ref) [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] return evt.wait() [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] result = hub.switch() [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] return self.greenlet.switch() [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self.f(*self.args, **self.kw) [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] raise exceptions.translate_fault(task_info.error) [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.690167] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] Faults: ['InvalidArgument'] [ 2235.690167] 
env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] [ 2235.690950] env[61855]: INFO nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Terminating instance [ 2235.692106] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2235.692323] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2235.692565] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce6ef0bb-3ca8-4513-8c44-fa6feaa03ca9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.695335] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2235.695335] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2235.695763] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02708ba1-85b9-4244-b1b5-92fd5a8a7cf7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.701977] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2235.702192] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05f69ac5-9d5b-4b62-afbd-cdbe3b4bdfca {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.704203] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2235.704375] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 
tempest-MultipleCreateTestJSON-1651908998-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2235.705321] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fae83008-2de9-4be2-a84d-38a0a9745f43 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.710124] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 2235.710124] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52916469-378a-5faf-09c9-1e4d967bba22" [ 2235.710124] env[61855]: _type = "Task" [ 2235.710124] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.717050] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52916469-378a-5faf-09c9-1e4d967bba22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.785894] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2235.786136] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2235.786332] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleting the datastore file [datastore2] e49e5769-b561-48e0-9009-21e71844238f {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2235.786593] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35830e17-6d48-4ccb-9485-fc51ee9cadca {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.792510] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 2235.792510] env[61855]: value = "task-4302990" [ 2235.792510] env[61855]: _type = "Task" [ 2235.792510] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.799960] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.220665] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2236.220947] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating directory with path [datastore2] vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2236.221187] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99ab3b65-3070-487b-abd7-c27096c1f0bd {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.232129] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Created directory with path [datastore2] vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2236.232323] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Fetch image to [datastore2] vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2236.232496] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2236.233213] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996711e7-0c20-432c-9b6f-1f7b7cd19f1a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.239404] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2489d1a0-5664-49f7-828d-a128492bee6d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.247972] env[61855]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daad66e7-cb58-47b6-b174-9d1f679cf6bb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.280110] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33b553b-5183-4f25-8f6f-8e8fbe7501df {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.285377] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8c1447c1-471a-4e49-867e-1d4815d09d74 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.301162] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065741} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2236.301390] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2236.301570] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2236.301736] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2236.301907] env[61855]: INFO nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Took 0.61 seconds to destroy the instance on the hypervisor. 
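The CopyVirtualDisk_Task failure and the "Waiting for the task ... progress is 0%" records above follow oslo.vmware's task-polling pattern: invoke_api() returns a task moref and wait_for_task() polls it in a looping call, raising VimFaultException when task_info.error is set (here the 'InvalidArgument' fault on fileType). A sketch of that pattern; the host and credentials are placeholders (a live vCenter is required to actually run this), while the datastore paths are the ones from the log:

from oslo_vmware import api
from oslo_vmware import exceptions as vexc

# Placeholder endpoint and credentials.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
src = ('[datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/'
       'c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk')
dst = ('[datastore2] vmware_temp/f3fe9477-4b60-4415-af9e-e84692632270/'
       'c2cb7e7f-87ef-46fd-89b3-fd48992318fc/'
       'c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk')
try:
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              session.vim.service_content.virtualDiskManager,
                              sourceName=src, destName=dst)
    session.wait_for_task(task)  # polls the task like the _poll_task lines above
except vexc.VimFaultException as e:
    print(e.fault_list, e)  # fault_list was ['InvalidArgument'] above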
[ 2236.304723] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2236.306967] env[61855]: DEBUG nova.compute.claims [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2236.307155] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.307374] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.359827] env[61855]: DEBUG oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2236.420645] env[61855]: DEBUG oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2236.420645] env[61855]: DEBUG oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2236.497644] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f94fcb-35a2-4396-ae48-5b53697d06e8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.504948] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c01823-9139-404b-9c24-0a6da5f5622b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.535661] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2555910b-8aae-4491-bf00-9e658c221584 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.543083] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc90b716-b2b3-4c99-9297-e7e9122b4fe4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.556258] env[61855]: DEBUG nova.compute.provider_tree [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2236.564623] env[61855]: DEBUG nova.scheduler.client.report [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2236.578551] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.271s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.579107] env[61855]: ERROR nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.579107] env[61855]: Faults: ['InvalidArgument'] [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] Traceback (most recent call last): [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2236.579107] env[61855]: 
ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self.driver.spawn(context, instance, image_meta, [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self._fetch_image_if_missing(context, vi) [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] image_cache(vi, tmp_image_ds_loc) [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] vm_util.copy_virtual_disk( [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] session._wait_for_task(vmdk_copy_task) [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] return self.wait_for_task(task_ref) [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] return evt.wait() [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] result = hub.switch() [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] return self.greenlet.switch() [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] self.f(*self.args, **self.kw) [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] raise exceptions.translate_fault(task_info.error) [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] Faults: ['InvalidArgument'] [ 2236.579107] env[61855]: ERROR nova.compute.manager [instance: e49e5769-b561-48e0-9009-21e71844238f] [ 2236.579858] env[61855]: DEBUG nova.compute.utils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2236.581611] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Build of instance e49e5769-b561-48e0-9009-21e71844238f was re-scheduled: A specified parameter was not correct: fileType [ 2236.581611] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2236.581995] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2236.582189] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2236.582375] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2236.582543] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2236.955855] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.973836] env[61855]: INFO nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Took 0.39 seconds to deallocate network for instance. [ 2237.078560] env[61855]: INFO nova.scheduler.client.report [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleted allocations for instance e49e5769-b561-48e0-9009-21e71844238f [ 2237.101236] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "e49e5769-b561-48e0-9009-21e71844238f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 580.504s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.101425] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "e49e5769-b561-48e0-9009-21e71844238f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 477.816s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.101628] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e49e5769-b561-48e0-9009-21e71844238f] During sync_power_state the instance has a pending task (spawning). Skip. 
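The inventory dicts reported above follow placement's capacity model: usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A worked check against the values logged for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad (consistent with "Total usable vcpus: 48, total allocated vcpus: 7", i.e. 7 of 192 effective VCPUs in use under the 4.0 ratio):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 107},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 210.0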
[ 2237.101809] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "e49e5769-b561-48e0-9009-21e71844238f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.102342] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "e49e5769-b561-48e0-9009-21e71844238f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 384.281s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.102567] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "e49e5769-b561-48e0-9009-21e71844238f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.102779] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "e49e5769-b561-48e0-9009-21e71844238f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.102946] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "e49e5769-b561-48e0-9009-21e71844238f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.104839] env[61855]: INFO nova.compute.manager [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Terminating instance [ 2237.106627] env[61855]: DEBUG nova.compute.manager [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2237.106827] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2237.107109] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3dc10aa-75bb-4e1d-b716-dc5e4a7453a1 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.117624] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111ce78f-7f96-4bdf-9329-43e216bca2d6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.144691] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e49e5769-b561-48e0-9009-21e71844238f could not be found. [ 2237.144907] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2237.145103] env[61855]: INFO nova.compute.manager [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: e49e5769-b561-48e0-9009-21e71844238f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2237.145351] env[61855]: DEBUG oslo.service.loopingcall [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2237.145827] env[61855]: DEBUG nova.compute.manager [-] [instance: e49e5769-b561-48e0-9009-21e71844238f] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2237.145925] env[61855]: DEBUG nova.network.neutron [-] [instance: e49e5769-b561-48e0-9009-21e71844238f] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2237.169262] env[61855]: DEBUG nova.network.neutron [-] [instance: e49e5769-b561-48e0-9009-21e71844238f] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2237.176746] env[61855]: INFO nova.compute.manager [-] [instance: e49e5769-b561-48e0-9009-21e71844238f] Took 0.03 seconds to deallocate network for instance. 
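
Two details of the destroy path above are worth noting: the WARNING shows the VMware driver treating a VM that is already gone from the backend (InstanceNotFound) as successfully destroyed rather than as an error, and the "Waiting for function ... to return" record appears to be oslo.service's retry wrapper around network deallocation. A sketch of that retry pattern, assuming only the public oslo_service.loopingcall.RetryDecorator API; the exception type and retry limits are illustrative, not the values Nova uses:

    from oslo_service import loopingcall

    class TransientNeutronError(Exception):
        pass

    # Re-invokes the wrapped call with growing sleeps whenever one of the
    # listed exceptions is raised; any other exception propagates at once.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(TransientNeutronError,))
    def _deallocate_network_with_retries():
        pass
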
[ 2237.274016] env[61855]: DEBUG oslo_concurrency.lockutils [None req-3036dd6b-0f3b-4b9b-9e98-c47b788cc0af tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "e49e5769-b561-48e0-9009-21e71844238f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.172s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.420038] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "127cd38a-4d53-4ca0-a806-e178937d7fe2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.420038] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "127cd38a-4d53-4ca0-a806-e178937d7fe2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.430146] env[61855]: DEBUG nova.compute.manager [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2258.501181] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.501436] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.502919] env[61855]: INFO nova.compute.claims [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2258.626439] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa66942-e346-4a94-9e5c-e9ac01254ee2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.634250] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e09bac9-9190-4c80-b6b8-9300823fd675 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.664487] env[61855]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee516060-4653-4693-aed9-bb1f7d886021 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.670918] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07202b94-2466-402c-925a-fb1ad3ef32d5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.683813] env[61855]: DEBUG nova.compute.provider_tree [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2258.693520] env[61855]: DEBUG nova.scheduler.client.report [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2258.711374] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.210s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.711851] env[61855]: DEBUG nova.compute.manager [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2258.750227] env[61855]: DEBUG nova.compute.utils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2258.751721] env[61855]: DEBUG nova.compute.manager [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Allocating IP information in the background. 
{{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2258.751721] env[61855]: DEBUG nova.network.neutron [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2258.760345] env[61855]: DEBUG nova.compute.manager [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2258.814181] env[61855]: DEBUG nova.policy [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6555bca994941ef876a79f963c827e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e855a24a69fb43d8955f7f8fe8cbb3da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 2258.822191] env[61855]: DEBUG nova.compute.manager [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2258.846694] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2258.846885] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2258.847091] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2258.847248] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2258.847401] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2258.847552] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2258.847776] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2258.847938] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2258.848129] 
env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2258.848300] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2258.848492] env[61855]: DEBUG nova.virt.hardware [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2258.849366] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ea4b44-ef25-4983-8a42-26ac564ffcf2 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.858841] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dc42c7-32ab-4063-b86d-efd7239ab834 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.203861] env[61855]: DEBUG nova.network.neutron [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Successfully created port: b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2259.826067] env[61855]: DEBUG nova.compute.manager [req-2eb1aa37-8e86-4103-9942-bacf3b39d0b7 req-64ac5c85-29e1-44f3-8bfe-cba407ac820e service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Received event network-vif-plugged-b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2259.826527] env[61855]: DEBUG oslo_concurrency.lockutils [req-2eb1aa37-8e86-4103-9942-bacf3b39d0b7 req-64ac5c85-29e1-44f3-8bfe-cba407ac820e service nova] Acquiring lock "127cd38a-4d53-4ca0-a806-e178937d7fe2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.827427] env[61855]: DEBUG oslo_concurrency.lockutils [req-2eb1aa37-8e86-4103-9942-bacf3b39d0b7 req-64ac5c85-29e1-44f3-8bfe-cba407ac820e service nova] Lock "127cd38a-4d53-4ca0-a806-e178937d7fe2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.827862] env[61855]: DEBUG oslo_concurrency.lockutils [req-2eb1aa37-8e86-4103-9942-bacf3b39d0b7 req-64ac5c85-29e1-44f3-8bfe-cba407ac820e service nova] Lock "127cd38a-4d53-4ca0-a806-e178937d7fe2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2259.828212] env[61855]: DEBUG nova.compute.manager [req-2eb1aa37-8e86-4103-9942-bacf3b39d0b7 req-64ac5c85-29e1-44f3-8bfe-cba407ac820e service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] No waiting events found dispatching network-vif-plugged-b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2259.828550] env[61855]: WARNING nova.compute.manager [req-2eb1aa37-8e86-4103-9942-bacf3b39d0b7 req-64ac5c85-29e1-44f3-8bfe-cba407ac820e service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Received unexpected event network-vif-plugged-b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9 for instance with vm_state building and task_state spawning. [ 2259.911181] env[61855]: DEBUG nova.network.neutron [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Successfully updated port: b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2259.947304] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "refresh_cache-127cd38a-4d53-4ca0-a806-e178937d7fe2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2259.947631] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "refresh_cache-127cd38a-4d53-4ca0-a806-e178937d7fe2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2259.947631] env[61855]: DEBUG nova.network.neutron [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2259.986421] env[61855]: DEBUG nova.network.neutron [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2260.195895] env[61855]: DEBUG nova.network.neutron [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Updating instance_info_cache with network_info: [{"id": "b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9", "address": "fa:16:3e:de:02:60", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb309e949-a7", "ovs_interfaceid": "b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2260.207610] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "refresh_cache-127cd38a-4d53-4ca0-a806-e178937d7fe2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.207610] env[61855]: DEBUG nova.compute.manager [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Instance network_info: |[{"id": "b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9", "address": "fa:16:3e:de:02:60", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb309e949-a7", "ovs_interfaceid": "b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2260.207953] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:02:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd891a662-7da0-4be5-8d0d-01b4ff70552a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2260.216235] env[61855]: DEBUG oslo.service.loopingcall [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2260.216815] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2260.217086] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-514b407c-f2ea-4c26-9ffa-5da36955e65e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.238608] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2260.238608] env[61855]: value = "task-4302991" [ 2260.238608] env[61855]: _type = "Task" [ 2260.238608] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.248259] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302991, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.748735] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302991, 'name': CreateVM_Task, 'duration_secs': 0.311337} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.748900] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2260.749594] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.749760] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.750099] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2260.750355] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70bbdc87-2dcd-4d81-b868-863f773b24be {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.754557] env[61855]: DEBUG oslo_vmware.api [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 2260.754557] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527cea3a-5783-216d-e7b1-5fd5110fbbd7" [ 2260.754557] env[61855]: _type = "Task" [ 2260.754557] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.761825] env[61855]: DEBUG oslo_vmware.api [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]527cea3a-5783-216d-e7b1-5fd5110fbbd7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.265937] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2261.266301] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2261.266395] env[61855]: DEBUG oslo_concurrency.lockutils [None req-05cea88f-23b4-45c9-8d5a-0008d5ed954c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2261.852881] env[61855]: DEBUG nova.compute.manager [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Received event network-changed-b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2261.853110] env[61855]: DEBUG nova.compute.manager [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Refreshing instance network info cache due to event network-changed-b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2261.853330] env[61855]: DEBUG oslo_concurrency.lockutils [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] Acquiring lock "refresh_cache-127cd38a-4d53-4ca0-a806-e178937d7fe2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2261.853476] env[61855]: DEBUG oslo_concurrency.lockutils [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] Acquired lock "refresh_cache-127cd38a-4d53-4ca0-a806-e178937d7fe2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2261.853667] env[61855]: DEBUG nova.network.neutron [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Refreshing network info cache for port b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2262.094579] env[61855]: DEBUG nova.network.neutron [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Updated VIF entry in instance network info cache for port b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2262.094939] env[61855]: DEBUG nova.network.neutron [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Updating instance_info_cache with network_info: [{"id": "b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9", "address": "fa:16:3e:de:02:60", "network": {"id": "4c28043e-66bd-47a7-be58-88b19799c3ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-582468848-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e855a24a69fb43d8955f7f8fe8cbb3da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d891a662-7da0-4be5-8d0d-01b4ff70552a", "external-id": "nsx-vlan-transportzone-815", "segmentation_id": 815, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb309e949-a7", "ovs_interfaceid": "b309e949-a7ea-4a12-bdd6-d28a6c5a5fa9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2262.104156] env[61855]: DEBUG oslo_concurrency.lockutils [req-aafc19b5-9105-4527-a360-956a093cca58 req-fb1ac1aa-19b5-4022-99b0-8b25f9a31dc1 service nova] Releasing lock "refresh_cache-127cd38a-4d53-4ca0-a806-e178937d7fe2" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2279.919431] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2280.924324] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2280.924656] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2281.924642] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.370203] env[61855]: WARNING oslo_vmware.rw_handles [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): 
[ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.370203] env[61855]: ERROR oslo_vmware.rw_handles [ 2282.370649] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2282.372418] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2282.372676] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Copying Virtual Disk [datastore2] vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/cfb21827-b7ce-4917-a509-42d22d9bb87e/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2282.372965] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2652519a-eee6-4c63-9fa7-10a2383adaf4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.381015] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 2282.381015] env[61855]: value = "task-4302992" [ 2282.381015] env[61855]: _type = "Task" [ 2282.381015] env[61855]: } to complete. 
{{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.388232] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302992, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.892723] env[61855]: DEBUG oslo_vmware.exceptions [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2282.893107] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2282.893726] env[61855]: ERROR nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.893726] env[61855]: Faults: ['InvalidArgument'] [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Traceback (most recent call last): [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] yield resources [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self.driver.spawn(context, instance, image_meta, [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self._fetch_image_if_missing(context, vi) [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] image_cache(vi, tmp_image_ds_loc) [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: 
c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] vm_util.copy_virtual_disk( [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] session._wait_for_task(vmdk_copy_task) [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] return self.wait_for_task(task_ref) [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] return evt.wait() [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] result = hub.switch() [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] return self.greenlet.switch() [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self.f(*self.args, **self.kw) [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] raise exceptions.translate_fault(task_info.error) [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Faults: ['InvalidArgument'] [ 2282.893726] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] [ 2282.894615] env[61855]: INFO nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Terminating instance [ 2282.895617] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.895821] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2282.896071] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d119385-54a2-463b-aed2-2d6952b35c88 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.898353] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2282.898511] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2282.899278] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cbcf99-3279-451d-b34e-45cef7b954f5 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.906361] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2282.906605] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9e59cf9-8189-4616-a89d-ef46d04727d4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.908706] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2282.908888] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2282.909825] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eda777e-d432-4a94-8c39-7a04d5dfa68c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.914215] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 2282.914215] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5213d1f4-7eee-523e-8bbc-8b983013abc4" [ 2282.914215] env[61855]: _type = "Task" [ 2282.914215] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.921140] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]5213d1f4-7eee-523e-8bbc-8b983013abc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.923590] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.923760] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2282.972578] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2282.972913] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2282.972913] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleting the datastore file [datastore2] c63748b0-5d55-447f-a11d-f0704edc1e86 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2282.973207] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5307e880-fb36-4470-8a56-91c11d2de53c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.979364] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for the task: (returnval){ [ 2282.979364] env[61855]: value = "task-4302994" [ 2282.979364] env[61855]: _type = "Task" [ 2282.979364] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.986726] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302994, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.424497] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2283.424807] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating directory with path [datastore2] vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2283.425078] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80a9603a-3aac-4804-a3e3-e6ea7ff6f9a6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.436453] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Created directory with path [datastore2] vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2283.436669] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Fetch image to [datastore2] vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2283.436858] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2283.437674] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150c4f9f-da02-43ad-961d-4b07b3513f97 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.444043] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8c275c-e35c-4523-9c02-8851403d0386 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.453165] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf557a9-62d9-4e6a-ab65-1f5601b1bc86 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.486577] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a97bb236-7c85-47f2-b304-f9b1ba63c702 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.495166] env[61855]: DEBUG oslo_vmware.api [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Task: {'id': task-4302994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07489} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.495707] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2283.495888] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2283.498159] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2283.498159] env[61855]: INFO nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Took 0.60 seconds to destroy the instance on the hypervisor. 
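The wait_for_task / _poll_task pairs throughout this trace follow one poll-until-terminal pattern: a vCenter task (SearchDatastore_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task) is started, and its state is then read periodically until it reports success or error. A minimal sketch of that loop, assuming a hypothetical get_task_info() accessor in place of oslo.vmware's real session plumbing:

    import time

    class TaskFailed(Exception):
        """Raised when the remote task ends in an error state."""

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # get_task_info() is a hypothetical callable returning an object with
        # .state ('running' | 'success' | 'error'), .progress, and .error.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                return info             # done; caller reads the task result
            if info.state == 'error':
                raise TaskFailed(info.error)   # surfaces as a fault exception
            # Still running: report progress and poll again; each pass here
            # corresponds to one "progress is 0%" line in the log.
            print('progress is %d%%' % info.progress)
            time.sleep(interval)
        raise TimeoutError('task did not complete within %ss' % timeout)

On completion the log records the task id and its duration, as with task-4302994 ('duration_secs': 0.07489) just above.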
[ 2283.498159] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e20eccf9-783b-44ae-a8fd-e04d7668fc65 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.499767] env[61855]: DEBUG nova.compute.claims [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2283.499965] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.500210] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.521775] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2283.574354] env[61855]: DEBUG oslo_vmware.rw_handles [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2283.633913] env[61855]: DEBUG oslo_vmware.rw_handles [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2283.634124] env[61855]: DEBUG oslo_vmware.rw_handles [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2283.696155] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2c7725-dee7-4e74-9365-85d7ac542c2c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.702836] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13697ef-3859-4b85-98df-3569745b3001 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.731991] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145bba8e-8853-4b9d-967b-14f30f04f63d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.738348] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2aabb2-78e1-409a-a515-2cd0225ab5d8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.750853] env[61855]: DEBUG nova.compute.provider_tree [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2283.760247] env[61855]: DEBUG nova.scheduler.client.report [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2283.774104] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.274s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.774620] env[61855]: ERROR nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.774620] env[61855]: Faults: ['InvalidArgument'] [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Traceback (most recent call last): [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2283.774620] env[61855]: 
ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self.driver.spawn(context, instance, image_meta, [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self._fetch_image_if_missing(context, vi) [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] image_cache(vi, tmp_image_ds_loc) [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] vm_util.copy_virtual_disk( [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] session._wait_for_task(vmdk_copy_task) [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] return self.wait_for_task(task_ref) [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] return evt.wait() [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] result = hub.switch() [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] return self.greenlet.switch() [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] self.f(*self.args, **self.kw) [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] raise exceptions.translate_fault(task_info.error) [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Faults: ['InvalidArgument'] [ 2283.774620] env[61855]: ERROR nova.compute.manager [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] [ 2283.775492] env[61855]: DEBUG nova.compute.utils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2283.776635] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Build of instance c63748b0-5d55-447f-a11d-f0704edc1e86 was re-scheduled: A specified parameter was not correct: fileType [ 2283.776635] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2283.777052] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2283.777233] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2283.777408] env[61855]: DEBUG nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2283.777573] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2283.924247] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2284.101374] env[61855]: DEBUG nova.network.neutron [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.116047] env[61855]: INFO nova.compute.manager [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Took 0.34 seconds to deallocate network for instance. 
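The failure that triggered this re-schedule is the generic fault path: a failed task carries a message plus a list of vCenter fault names, and the client maps any recognized name onto a specific exception class, falling back to a catch-all VimFaultException when nothing matches (as with 'InvalidArgument' here). A rough sketch of that translation step; the registry and class names below are illustrative, not oslo.vmware's actual ones:

    class VimFault(Exception):
        """Catch-all carrying the raw fault names."""
        def __init__(self, msg, fault_list):
            super().__init__('%s Faults: %s' % (msg, fault_list))
            self.fault_list = fault_list

    class FileFault(VimFault):
        """Example of a specific, matchable fault."""

    _FAULT_CLASSES = {'FileFault': FileFault}   # hypothetical registry

    def translate_fault(msg, fault_list):
        # Return the most specific exception known for the reported faults;
        # unmatched names fall back to the generic class.
        for name in fault_list:
            cls = _FAULT_CLASSES.get(name)
            if cls is not None:
                return cls(msg, fault_list)
        return VimFault(msg, fault_list)

    # The copy-disk error above would surface roughly as:
    # raise translate_fault('A specified parameter was not correct: fileType',
    #                       ['InvalidArgument'])

Because the fault is not recognized as fatal to the host, the compute manager treats the build as retryable and re-schedules the instance rather than marking it failed outright.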
[ 2284.211396] env[61855]: INFO nova.scheduler.client.report [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Deleted allocations for instance c63748b0-5d55-447f-a11d-f0704edc1e86 [ 2284.233480] env[61855]: DEBUG oslo_concurrency.lockutils [None req-20fe80d6-e280-41f9-9d1d-d55d675fe563 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.610s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.233744] env[61855]: DEBUG oslo_concurrency.lockutils [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.490s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.234039] env[61855]: DEBUG oslo_concurrency.lockutils [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Acquiring lock "c63748b0-5d55-447f-a11d-f0704edc1e86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2284.234245] env[61855]: DEBUG oslo_concurrency.lockutils [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.234463] env[61855]: DEBUG oslo_concurrency.lockutils [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.236629] env[61855]: INFO nova.compute.manager [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Terminating instance [ 2284.238905] env[61855]: DEBUG nova.compute.manager [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2284.239079] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2284.239575] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b242469-777d-4c71-93c6-23f82a32648b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.252021] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acb4245-cfed-4f4c-8c20-dc3b15c8a1e9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.284835] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c63748b0-5d55-447f-a11d-f0704edc1e86 could not be found. [ 2284.285071] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2284.285267] env[61855]: INFO nova.compute.manager [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2284.285527] env[61855]: DEBUG oslo.service.loopingcall [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2284.286016] env[61855]: DEBUG nova.compute.manager [-] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2284.286128] env[61855]: DEBUG nova.network.neutron [-] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2284.309432] env[61855]: DEBUG nova.network.neutron [-] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.317235] env[61855]: INFO nova.compute.manager [-] [instance: c63748b0-5d55-447f-a11d-f0704edc1e86] Took 0.03 seconds to deallocate network for instance. 
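The WARNING above shows the destroy path being deliberately idempotent: the backend VM is already gone (the failed build was cleaned up earlier), so InstanceNotFound is downgraded to a warning and teardown proceeds to network deallocation anyway. A sketch of that tolerate-missing pattern, with stand-in callables for the driver and network layers:

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def destroy(find_vm, unregister_and_delete, deallocate_network,
                instance_uuid):
        # find_vm / unregister_and_delete / deallocate_network are
        # hypothetical helpers, not Nova's actual driver interface.
        try:
            vm_ref = find_vm(instance_uuid)
            unregister_and_delete(vm_ref)
        except InstanceNotFound:
            # Already gone on the hypervisor: log and keep going, so a
            # re-issued terminate still releases the remaining resources.
            LOG.warning('Instance does not exist on backend: %s',
                        instance_uuid)
        deallocate_network(instance_uuid)

This is why the terminate completes in 0.17s and still emits the full deallocate/lock-release sequence even though nothing existed on the hypervisor.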
[ 2284.403505] env[61855]: DEBUG oslo_concurrency.lockutils [None req-79af561e-bbc6-40fd-a8b1-9d9bbe7cda53 tempest-MultipleCreateTestJSON-1651908998 tempest-MultipleCreateTestJSON-1651908998-project-member] Lock "c63748b0-5d55-447f-a11d-f0704edc1e86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.924995] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2284.925227] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2284.925406] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2284.944396] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2284.944621] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2284.944711] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2284.945264] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2284.945264] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2284.945264] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2284.945487] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2285.924547] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2285.935440] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2285.935660] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2285.935828] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2285.935989] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2285.937147] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5414198c-24cc-4785-8a7f-aa714eabaec0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.945821] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d436e747-bbe1-4f49-a4bd-dd018ceed08a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.959305] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd187f07-5dad-4944-8259-c6c2e85e664b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.965369] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a00c08f-adee-47de-a0f5-3e2a86a524ff {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.994512] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180677MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2285.994652] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2285.994843] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.055510] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.055672] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.055805] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.055930] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.056071] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bb506aaf-a599-4619-8035-a60952973f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.056197] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 127cd38a-4d53-4ca0-a806-e178937d7fe2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.056380] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2286.056518] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=110GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2286.134655] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c0f3bc-dc0c-42eb-bac4-f14045ae4936 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.142026] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7e00b6-78d3-41b7-9da3-5efb286c0bfe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.171577] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b18063-3d99-4cf0-81e6-7bb83f31bcb8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.179044] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebfdb4d-464a-45c6-aa11-4308d7820db9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.191377] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2286.199905] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2286.214946] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2286.215211] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.220s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.214819] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.199095] env[61855]: WARNING oslo_vmware.rw_handles [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2329.199095] env[61855]: ERROR oslo_vmware.rw_handles [ 2329.199871] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2329.201331] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2329.201596] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Copying Virtual Disk [datastore2] vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/a52c07cc-fcca-4378-8fe9-c80a63a48d34/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2329.201879] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc542e24-f4f4-44d3-95e2-92c6f73e7bf3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.209262] env[61855]: DEBUG oslo_vmware.api [None 
req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 2329.209262] env[61855]: value = "task-4302995" [ 2329.209262] env[61855]: _type = "Task" [ 2329.209262] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.217109] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': task-4302995, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.719976] env[61855]: DEBUG oslo_vmware.exceptions [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2329.720308] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2329.720884] env[61855]: ERROR nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2329.720884] env[61855]: Faults: ['InvalidArgument'] [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Traceback (most recent call last): [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] yield resources [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self.driver.spawn(context, instance, image_meta, [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self._fetch_image_if_missing(context, vi) [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 
333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] image_cache(vi, tmp_image_ds_loc) [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] vm_util.copy_virtual_disk( [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] session._wait_for_task(vmdk_copy_task) [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] return self.wait_for_task(task_ref) [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] return evt.wait() [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] result = hub.switch() [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] return self.greenlet.switch() [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self.f(*self.args, **self.kw) [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] raise exceptions.translate_fault(task_info.error) [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Faults: ['InvalidArgument'] [ 2329.720884] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] [ 2329.721871] env[61855]: INFO nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 
tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Terminating instance [ 2329.722876] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.723114] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2329.723355] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac71c856-dd83-4d60-a598-e11c327a1ea4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.725432] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2329.725631] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2329.726380] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae37ebbc-ae51-47c3-8171-2d394ae1347c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.733462] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2329.733707] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e55a23f0-8823-417d-8ab5-56bf14612c56 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.735720] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2329.735899] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2329.736869] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b86512c2-12f8-40b5-86a0-ac377902124b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.741466] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 2329.741466] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52751bbb-5269-538f-9a85-5fc375441fad" [ 2329.741466] env[61855]: _type = "Task" [ 2329.741466] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.748718] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52751bbb-5269-538f-9a85-5fc375441fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.174371] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2330.175373] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2330.175373] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Deleting the datastore file [datastore2] 333ca086-f585-4325-9ba8-fbcdfc6650f5 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2330.175373] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e807ca17-40e8-4d66-9799-9ff737305fd3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.181686] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for the task: (returnval){ [ 2330.181686] env[61855]: value = "task-4302997" [ 2330.181686] env[61855]: _type = "Task" [ 2330.181686] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.190091] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': task-4302997, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.251794] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2330.252143] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating directory with path [datastore2] vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2330.252411] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16919e31-4f36-4c5b-a64f-21f18438f0d9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.273388] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Created directory with path [datastore2] vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2330.273613] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Fetch image to [datastore2] vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2330.273765] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2330.274571] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca54a00-404f-44dd-85b4-b230d934c7fc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.281295] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d397519-684a-4d80-b6a2-20a8cf64fa22 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.290531] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a7aa54-6557-4e88-917e-102feeb3a1c9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.321290] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ba9eef49-07d7-4cd8-83ef-7b13da9b6886 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.326808] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-df6ad89c-646f-4464-8844-8f1e578b3f14 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.350243] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2330.430685] env[61855]: DEBUG oslo_vmware.rw_handles [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2330.488920] env[61855]: DEBUG oslo_vmware.rw_handles [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2330.489150] env[61855]: DEBUG oslo_vmware.rw_handles [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2330.691949] env[61855]: DEBUG oslo_vmware.api [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Task: {'id': task-4302997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075611} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.692252] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2330.692442] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2330.692621] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2330.692935] env[61855]: INFO nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Took 0.97 seconds to destroy the instance on the hypervisor. [ 2330.694945] env[61855]: DEBUG nova.compute.claims [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2330.695138] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.695358] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.824824] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f542a57-81ef-49ac-9e47-d4c57ee713d7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.831889] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ff7f4f-a0fe-4569-9fb6-11f833da7305 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.860669] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10938a4-874d-4af6-b000-cce355fa10e8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.867147] env[61855]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfacaa94-9467-46cd-bd0a-d384002f8b12 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.880872] env[61855]: DEBUG nova.compute.provider_tree [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2330.890106] env[61855]: DEBUG nova.scheduler.client.report [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2330.904121] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.209s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.904607] env[61855]: ERROR nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2330.904607] env[61855]: Faults: ['InvalidArgument'] [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Traceback (most recent call last): [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self.driver.spawn(context, instance, image_meta, [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self._fetch_image_if_missing(context, vi) [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing 
[ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] image_cache(vi, tmp_image_ds_loc) [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] vm_util.copy_virtual_disk( [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] session._wait_for_task(vmdk_copy_task) [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] return self.wait_for_task(task_ref) [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] return evt.wait() [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] result = hub.switch() [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] return self.greenlet.switch() [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] self.f(*self.args, **self.kw) [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] raise exceptions.translate_fault(task_info.error) [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Faults: ['InvalidArgument'] [ 2330.904607] env[61855]: ERROR nova.compute.manager [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] [ 2330.905447] env[61855]: DEBUG nova.compute.utils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] VimFaultException {{(pid=61855) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 2330.907022] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Build of instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 was re-scheduled: A specified parameter was not correct: fileType [ 2330.907022] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2330.907433] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2330.907612] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2330.907786] env[61855]: DEBUG nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2330.907950] env[61855]: DEBUG nova.network.neutron [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2331.434848] env[61855]: DEBUG nova.network.neutron [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.448211] env[61855]: INFO nova.compute.manager [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Took 0.54 seconds to deallocate network for instance. 
[ 2331.566749] env[61855]: INFO nova.scheduler.client.report [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Deleted allocations for instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 [ 2331.591538] env[61855]: DEBUG oslo_concurrency.lockutils [None req-03d3906e-3b1b-4332-9edd-ee8b739e01a4 tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 542.977s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.591900] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 347.024s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2331.592214] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "333ca086-f585-4325-9ba8-fbcdfc6650f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2331.592510] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2331.592713] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.594670] env[61855]: INFO nova.compute.manager [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Terminating instance [ 2331.596465] env[61855]: DEBUG nova.compute.manager [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Start destroying the instance on the hypervisor. 
{{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2331.596730] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2331.597325] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9ba2193-7f7a-4f6b-8402-052fc228b181 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.606812] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c489c2-95f3-48da-a2f0-94fd9a1e8cf4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.634041] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 333ca086-f585-4325-9ba8-fbcdfc6650f5 could not be found. [ 2331.634294] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2331.634485] env[61855]: INFO nova.compute.manager [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2331.634734] env[61855]: DEBUG oslo.service.loopingcall [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2331.635256] env[61855]: DEBUG nova.compute.manager [-] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2331.635365] env[61855]: DEBUG nova.network.neutron [-] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2331.660818] env[61855]: DEBUG nova.network.neutron [-] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.668610] env[61855]: INFO nova.compute.manager [-] [instance: 333ca086-f585-4325-9ba8-fbcdfc6650f5] Took 0.03 seconds to deallocate network for instance. 
[ 2331.751810] env[61855]: DEBUG oslo_concurrency.lockutils [None req-5f357bd4-69ba-423e-a185-d65d563e84fd tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Lock "333ca086-f585-4325-9ba8-fbcdfc6650f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.160s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.259575] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.260056] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Getting list of instances from cluster (obj){ [ 2333.260056] env[61855]: value = "domain-c8" [ 2333.260056] env[61855]: _type = "ClusterComputeResource" [ 2333.260056] env[61855]: } {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2333.261133] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f36940-b5d6-4d55-a851-3ef55ef711dc {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.273326] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Got total of 5 instances {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2339.924866] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.927533] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.927933] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2341.923946] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2341.923946] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.924101] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.924398] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.924478] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2345.924519] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.924838] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2345.924992] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2345.939940] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2345.940103] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2345.940243] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2345.940393] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2345.940538] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2345.940673] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2345.941223] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.941338] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61855) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2347.934206] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.945333] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.945511] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2347.945669] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2347.945829] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2347.946970] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab2e3ad-0254-4dfe-9803-4cdff137e0c3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.955641] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f591721-77a9-4f59-98b4-40768f1beb37 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.969134] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ab12b3-dad5-417f-aa91-d4cdf5e61163 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.975050] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25f1831-2726-4fbf-b8ea-1ccf9ee1765f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.004146] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource 
view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180641MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2348.004293] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2348.004484] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2348.136017] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2348.136207] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2348.136337] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2348.136462] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bb506aaf-a599-4619-8035-a60952973f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2348.136584] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 127cd38a-4d53-4ca0-a806-e178937d7fe2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2348.136772] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2348.136913] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=110GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2348.203608] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c425fb6b-dd33-42ff-95dd-20d55a2f4871 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.211326] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2910edda-cbf5-4839-a9b0-efce56c53f73 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.240754] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49514b4a-8fe6-4319-83b5-59df11ad16e8 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.247455] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475cfb89-2f9d-4360-bc25-c55baf804eb4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.260233] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2348.268254] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2348.282526] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2348.282722] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.278s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2348.925061] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.925061] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Cleaning up deleted instances {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2348.934814] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] There are 0 instances to clean {{(pid=61855) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2349.929870] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2350.924717] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.777020] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "e8d09476-022c-4c0b-907d-bcc90d2d3963" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2371.777813] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "e8d09476-022c-4c0b-907d-bcc90d2d3963" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2371.790333] env[61855]: DEBUG nova.compute.manager [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Starting instance... 
{{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2371.839780] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2371.840562] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2371.841523] env[61855]: INFO nova.compute.claims [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2371.975921] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add683b9-0506-403a-ab93-e574ebe8a2e7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.984262] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43e870e-5f3c-4b3e-b86b-c793221fef0d {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.015269] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab00c82-a974-4ad6-87d2-99937ef58cdf {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.023197] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47be8d86-7dda-4efa-8247-18332c21e97a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.036600] env[61855]: DEBUG nova.compute.provider_tree [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2372.045245] env[61855]: DEBUG nova.scheduler.client.report [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2372.060997] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 
tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.221s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2372.061535] env[61855]: DEBUG nova.compute.manager [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Start building networks asynchronously for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2372.117255] env[61855]: DEBUG nova.compute.utils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2372.118814] env[61855]: DEBUG nova.compute.manager [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2372.119042] env[61855]: DEBUG nova.network.neutron [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2372.132721] env[61855]: DEBUG nova.compute.manager [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2372.195902] env[61855]: DEBUG nova.policy [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5256e05619f744e988b78876f04b7286', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a6c123dab04b01868b291d2b953e75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 2372.210361] env[61855]: DEBUG nova.compute.manager [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2372.227092] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "84197864-dc04-4eec-80d4-7800b2892507" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2372.227419] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "84197864-dc04-4eec-80d4-7800b2892507" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.238417] env[61855]: DEBUG nova.compute.manager [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Starting instance... {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2372.245191] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=<?>,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T19:25:58Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2372.245534] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2372.245770] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2372.246069] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2372.246304] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Image pref 0:0:0 {{(pid=61855) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2372.246535] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2372.246834] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2372.247094] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2372.247438] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2372.247691] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2372.247960] env[61855]: DEBUG nova.virt.hardware [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2372.249812] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9235014-aaee-4c11-a416-ab3f52da8c42 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.262068] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73345141-3a4d-4bb6-92f9-aa24424987a0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.304686] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2372.305146] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.307974] env[61855]: INFO 
nova.compute.claims [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2372.502487] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ae884a-bb07-431a-89b6-434cbf830071 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.511525] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d36bb9-10f9-4345-b8e8-10ba5a66396e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.546658] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609236ad-64cc-44b1-a784-07c159e4978c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.553746] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abdecae-2fe4-4cd1-ad2c-a8d7df42426c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.567848] env[61855]: DEBUG nova.compute.provider_tree [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2372.577950] env[61855]: DEBUG nova.scheduler.client.report [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2372.604600] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.298s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2372.604600] env[61855]: DEBUG nova.compute.manager [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Start building networks asynchronously for instance. 
{{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2372.649957] env[61855]: DEBUG nova.network.neutron [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Successfully created port: 1d58cd96-5241-45a2-81e5-2903376d54c4 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2372.656204] env[61855]: DEBUG nova.compute.utils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Using /dev/sd instead of None {{(pid=61855) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2372.657577] env[61855]: DEBUG nova.compute.manager [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Allocating IP information in the background. {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2372.657758] env[61855]: DEBUG nova.network.neutron [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] allocate_for_instance() {{(pid=61855) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2372.671858] env[61855]: DEBUG nova.compute.manager [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Start building block device mappings for instance. {{(pid=61855) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2372.760508] env[61855]: DEBUG nova.policy [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6365552b80dc401d8a166f179d231b10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ec8ab8fc8404ec8a37780aa6e6fd40e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61855) authorize /opt/stack/nova/nova/policy.py:203}} [ 2372.770148] env[61855]: DEBUG nova.compute.manager [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Start spawning the instance on the hypervisor. 
{{(pid=61855) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2372.800285] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T19:26:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T19:25:57Z,direct_url=,disk_format='vmdk',id=c2cb7e7f-87ef-46fd-89b3-fd48992318fc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9af591690d3f45af8302cad2a4578940',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T19:25:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2372.800707] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2372.800707] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image limits 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2372.800871] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Flavor pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2372.801191] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Image pref 0:0:0 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2372.801401] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61855) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2372.801624] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2372.801797] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2372.801973] env[61855]: DEBUG nova.virt.hardware [None 
req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Got 1 possible topologies {{(pid=61855) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2372.802171] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2372.802357] env[61855]: DEBUG nova.virt.hardware [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61855) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2372.803273] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894292ff-f371-47fb-a798-0934fb58a59c {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.813987] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e096e2-b932-4090-bc6b-270f52cb3a1e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.337822] env[61855]: DEBUG nova.network.neutron [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Successfully created port: 404934cc-7c92-4d82-8ee7-78fb9e34f3f5 {{(pid=61855) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2373.642099] env[61855]: DEBUG nova.compute.manager [req-dbc5c516-5fde-41b5-91e8-1f3289c160f7 req-4d8a781b-de55-4b1b-8535-608b925a9593 service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Received event network-vif-plugged-1d58cd96-5241-45a2-81e5-2903376d54c4 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2373.642395] env[61855]: DEBUG oslo_concurrency.lockutils [req-dbc5c516-5fde-41b5-91e8-1f3289c160f7 req-4d8a781b-de55-4b1b-8535-608b925a9593 service nova] Acquiring lock "e8d09476-022c-4c0b-907d-bcc90d2d3963-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.642617] env[61855]: DEBUG oslo_concurrency.lockutils [req-dbc5c516-5fde-41b5-91e8-1f3289c160f7 req-4d8a781b-de55-4b1b-8535-608b925a9593 service nova] Lock "e8d09476-022c-4c0b-907d-bcc90d2d3963-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.642793] env[61855]: DEBUG oslo_concurrency.lockutils [req-dbc5c516-5fde-41b5-91e8-1f3289c160f7 req-4d8a781b-de55-4b1b-8535-608b925a9593 service nova] Lock "e8d09476-022c-4c0b-907d-bcc90d2d3963-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.642969] env[61855]: DEBUG nova.compute.manager 
[req-dbc5c516-5fde-41b5-91e8-1f3289c160f7 req-4d8a781b-de55-4b1b-8535-608b925a9593 service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] No waiting events found dispatching network-vif-plugged-1d58cd96-5241-45a2-81e5-2903376d54c4 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2373.643325] env[61855]: WARNING nova.compute.manager [req-dbc5c516-5fde-41b5-91e8-1f3289c160f7 req-4d8a781b-de55-4b1b-8535-608b925a9593 service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Received unexpected event network-vif-plugged-1d58cd96-5241-45a2-81e5-2903376d54c4 for instance with vm_state building and task_state spawning. [ 2373.875491] env[61855]: DEBUG nova.network.neutron [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Successfully updated port: 1d58cd96-5241-45a2-81e5-2903376d54c4 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2373.887309] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "refresh_cache-e8d09476-022c-4c0b-907d-bcc90d2d3963" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.887498] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "refresh_cache-e8d09476-022c-4c0b-907d-bcc90d2d3963" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.887684] env[61855]: DEBUG nova.network.neutron [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2373.939499] env[61855]: DEBUG nova.network.neutron [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Instance cache missing network info. 
{{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2374.212507] env[61855]: DEBUG nova.network.neutron [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Updating instance_info_cache with network_info: [{"id": "1d58cd96-5241-45a2-81e5-2903376d54c4", "address": "fa:16:3e:15:9e:c3", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d58cd96-52", "ovs_interfaceid": "1d58cd96-5241-45a2-81e5-2903376d54c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2374.226045] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "refresh_cache-e8d09476-022c-4c0b-907d-bcc90d2d3963" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.226343] env[61855]: DEBUG nova.compute.manager [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Instance network_info: |[{"id": "1d58cd96-5241-45a2-81e5-2903376d54c4", "address": "fa:16:3e:15:9e:c3", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d58cd96-52", "ovs_interfaceid": "1d58cd96-5241-45a2-81e5-2903376d54c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2374.226747] env[61855]: 
DEBUG nova.virt.vmwareapi.vmops [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:9e:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f925dc8-2145-457e-a4d4-c07117356dd0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d58cd96-5241-45a2-81e5-2903376d54c4', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2374.234485] env[61855]: DEBUG oslo.service.loopingcall [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2374.234959] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2374.235208] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6736c926-3a91-4545-ba7a-d1076a283e10 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.255413] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2374.255413] env[61855]: value = "task-4302998" [ 2374.255413] env[61855]: _type = "Task" [ 2374.255413] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.264311] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302998, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.388682] env[61855]: DEBUG nova.network.neutron [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Successfully updated port: 404934cc-7c92-4d82-8ee7-78fb9e34f3f5 {{(pid=61855) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2374.401633] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "refresh_cache-84197864-dc04-4eec-80d4-7800b2892507" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2374.401633] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "refresh_cache-84197864-dc04-4eec-80d4-7800b2892507" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2374.401783] env[61855]: DEBUG nova.network.neutron [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Building network info cache for instance {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2374.444665] env[61855]: DEBUG nova.network.neutron [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Instance cache missing network info. {{(pid=61855) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2374.764718] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302998, 'name': CreateVM_Task, 'duration_secs': 0.299745} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.764887] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2374.765686] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2374.765855] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2374.766187] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2374.766435] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b6e8320-0642-4e66-8c46-00c00217789e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.770806] env[61855]: DEBUG oslo_vmware.api [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 2374.770806] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]528052c2-5828-44df-aea5-fd1949bf60f4" [ 2374.770806] env[61855]: _type = "Task" [ 2374.770806] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.779560] env[61855]: DEBUG oslo_vmware.api [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]528052c2-5828-44df-aea5-fd1949bf60f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.781603] env[61855]: DEBUG nova.network.neutron [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Updating instance_info_cache with network_info: [{"id": "404934cc-7c92-4d82-8ee7-78fb9e34f3f5", "address": "fa:16:3e:45:a9:bf", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap404934cc-7c", "ovs_interfaceid": "404934cc-7c92-4d82-8ee7-78fb9e34f3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2374.794174] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "refresh_cache-84197864-dc04-4eec-80d4-7800b2892507" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.794450] env[61855]: DEBUG nova.compute.manager [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Instance network_info: |[{"id": "404934cc-7c92-4d82-8ee7-78fb9e34f3f5", "address": "fa:16:3e:45:a9:bf", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap404934cc-7c", "ovs_interfaceid": "404934cc-7c92-4d82-8ee7-78fb9e34f3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61855) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2374.794826] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:a9:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '404934cc-7c92-4d82-8ee7-78fb9e34f3f5', 'vif_model': 'vmxnet3'}] {{(pid=61855) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2374.802554] env[61855]: DEBUG oslo.service.loopingcall [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2374.803059] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Creating VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2374.803290] env[61855]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bf03f46-4003-4a21-a6aa-80c3786b944a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.823263] env[61855]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2374.823263] env[61855]: value = "task-4302999" [ 2374.823263] env[61855]: _type = "Task" [ 2374.823263] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.830542] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302999, 'name': CreateVM_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.281561] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2375.281964] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2375.282060] env[61855]: DEBUG oslo_concurrency.lockutils [None req-7c77dc61-ba07-41ee-b61e-ab18c12f1f0d tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2375.333757] env[61855]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302999, 'name': CreateVM_Task, 'duration_secs': 0.328562} completed successfully. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.334051] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Created VM on the ESX host {{(pid=61855) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2375.334726] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2375.334898] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2375.335235] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2375.335482] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e23c10e6-73e5-42f3-8c95-4d715ae68e25 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.339696] env[61855]: DEBUG oslo_vmware.api [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){ [ 2375.339696] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52feef8e-879b-89b8-99e3-7a5a1b85908c" [ 2375.339696] env[61855]: _type = "Task" [ 2375.339696] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.346779] env[61855]: DEBUG oslo_vmware.api [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52feef8e-879b-89b8-99e3-7a5a1b85908c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.680771] env[61855]: DEBUG nova.compute.manager [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Received event network-changed-1d58cd96-5241-45a2-81e5-2903376d54c4 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2375.680771] env[61855]: DEBUG nova.compute.manager [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Refreshing instance network info cache due to event network-changed-1d58cd96-5241-45a2-81e5-2903376d54c4. 
{{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2375.680903] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Acquiring lock "refresh_cache-e8d09476-022c-4c0b-907d-bcc90d2d3963" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2375.681066] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Acquired lock "refresh_cache-e8d09476-022c-4c0b-907d-bcc90d2d3963" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2375.681235] env[61855]: DEBUG nova.network.neutron [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Refreshing network info cache for port 1d58cd96-5241-45a2-81e5-2903376d54c4 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2375.852981] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2375.853251] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Processing image c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2375.853585] env[61855]: DEBUG oslo_concurrency.lockutils [None req-bebc10f5-7197-45e1-ab24-923400d04998 tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2375.932938] env[61855]: DEBUG nova.network.neutron [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Updated VIF entry in instance network info cache for port 1d58cd96-5241-45a2-81e5-2903376d54c4. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2375.933324] env[61855]: DEBUG nova.network.neutron [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Updating instance_info_cache with network_info: [{"id": "1d58cd96-5241-45a2-81e5-2903376d54c4", "address": "fa:16:3e:15:9e:c3", "network": {"id": "bc009904-ba15-4b95-8a6e-9d94a48fb018", "bridge": "br-int", "label": "tempest-ServersTestJSON-633278838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a6c123dab04b01868b291d2b953e75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f925dc8-2145-457e-a4d4-c07117356dd0", "external-id": "nsx-vlan-transportzone-356", "segmentation_id": 356, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d58cd96-52", "ovs_interfaceid": "1d58cd96-5241-45a2-81e5-2903376d54c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2375.943253] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Releasing lock "refresh_cache-e8d09476-022c-4c0b-907d-bcc90d2d3963" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2375.943488] env[61855]: DEBUG nova.compute.manager [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Received event network-vif-plugged-404934cc-7c92-4d82-8ee7-78fb9e34f3f5 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2375.943685] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Acquiring lock "84197864-dc04-4eec-80d4-7800b2892507-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2375.943890] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Lock "84197864-dc04-4eec-80d4-7800b2892507-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.944065] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Lock "84197864-dc04-4eec-80d4-7800b2892507-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.944235] env[61855]: DEBUG 
nova.compute.manager [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] No waiting events found dispatching network-vif-plugged-404934cc-7c92-4d82-8ee7-78fb9e34f3f5 {{(pid=61855) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2375.944402] env[61855]: WARNING nova.compute.manager [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Received unexpected event network-vif-plugged-404934cc-7c92-4d82-8ee7-78fb9e34f3f5 for instance with vm_state building and task_state spawning. [ 2375.944566] env[61855]: DEBUG nova.compute.manager [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Received event network-changed-404934cc-7c92-4d82-8ee7-78fb9e34f3f5 {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2375.944725] env[61855]: DEBUG nova.compute.manager [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Refreshing instance network info cache due to event network-changed-404934cc-7c92-4d82-8ee7-78fb9e34f3f5. {{(pid=61855) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2375.944919] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Acquiring lock "refresh_cache-84197864-dc04-4eec-80d4-7800b2892507" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2375.945064] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Acquired lock "refresh_cache-84197864-dc04-4eec-80d4-7800b2892507" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2375.945223] env[61855]: DEBUG nova.network.neutron [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Refreshing network info cache for port 404934cc-7c92-4d82-8ee7-78fb9e34f3f5 {{(pid=61855) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2376.247478] env[61855]: DEBUG nova.network.neutron [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Updated VIF entry in instance network info cache for port 404934cc-7c92-4d82-8ee7-78fb9e34f3f5. 
{{(pid=61855) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2376.247850] env[61855]: DEBUG nova.network.neutron [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Updating instance_info_cache with network_info: [{"id": "404934cc-7c92-4d82-8ee7-78fb9e34f3f5", "address": "fa:16:3e:45:a9:bf", "network": {"id": "df492e78-1ffb-4ea3-8471-431b597750b6", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-895209242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ec8ab8fc8404ec8a37780aa6e6fd40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap404934cc-7c", "ovs_interfaceid": "404934cc-7c92-4d82-8ee7-78fb9e34f3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2376.257014] env[61855]: DEBUG oslo_concurrency.lockutils [req-6b99264f-977b-4ecc-a1bd-6b1908414e2f req-7e3521a0-2486-4f27-bf39-cec7f3291ddb service nova] Releasing lock "refresh_cache-84197864-dc04-4eec-80d4-7800b2892507" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2377.360506] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07a370a1-b567-4775-82df-ef007eac2f2a tempest-ServerDiskConfigTestJSON-319860424 tempest-ServerDiskConfigTestJSON-319860424-project-member] Acquiring lock "bb506aaf-a599-4619-8035-a60952973f0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.905667] env[61855]: WARNING oslo_vmware.rw_handles [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2377.905667] env[61855]: ERROR oslo_vmware.rw_handles [ 2377.906139] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2377.907893] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2377.908181] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Copying Virtual Disk [datastore2] vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/54ccd2fd-26ff-4119-8582-5d127bbba0a4/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2377.908447] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2374a1ff-51cc-43b8-b23c-130a78a84a79 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.916033] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 2377.916033] env[61855]: value = "task-4303000" [ 2377.916033] env[61855]: _type = "Task" [ 2377.916033] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.924224] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': task-4303000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.426615] env[61855]: DEBUG oslo_vmware.exceptions [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Fault InvalidArgument not matched. 
{{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2378.427137] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2378.427418] env[61855]: ERROR nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2378.427418] env[61855]: Faults: ['InvalidArgument'] [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Traceback (most recent call last): [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] yield resources [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self.driver.spawn(context, instance, image_meta, [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self._fetch_image_if_missing(context, vi) [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] image_cache(vi, tmp_image_ds_loc) [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] vm_util.copy_virtual_disk( [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] session._wait_for_task(vmdk_copy_task) [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] return self.wait_for_task(task_ref) [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] return evt.wait() [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] result = hub.switch() [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] return self.greenlet.switch() [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self.f(*self.args, **self.kw) [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] raise exceptions.translate_fault(task_info.error) [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Faults: ['InvalidArgument'] [ 2378.427418] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] [ 2378.428560] env[61855]: INFO nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Terminating instance [ 2378.429286] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2378.429494] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2378.429749] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b199376-76f2-4455-bb49-4476b73fe1c3 {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.432045] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2378.432229] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2378.432938] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcddc2d-5de1-4d07-965d-1edaa7603d42 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.439641] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2378.439900] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4aca825b-0eff-4663-81d3-2773f0784020 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.442269] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2378.442443] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2378.443396] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a67202-1374-4adb-a2af-788124ec4b0f {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.448108] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 2378.448108] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52fe882f-23cd-5517-9bea-862276635c49" [ 2378.448108] env[61855]: _type = "Task" [ 2378.448108] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2378.455274] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52fe882f-23cd-5517-9bea-862276635c49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.512024] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2378.512024] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2378.512024] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Deleting the datastore file [datastore2] 97cb77dc-8752-4d84-a2f3-9c22b241fa9d {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2378.512024] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93e533a0-9e96-4823-8e82-c9b1edb056a0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.515896] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for the task: (returnval){ [ 2378.515896] env[61855]: value = "task-4303002" [ 2378.515896] env[61855]: _type = "Task" [ 2378.515896] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2378.523302] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': task-4303002, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.958694] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2378.958967] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating directory with path [datastore2] vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2378.959227] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bafc331e-d321-4e76-9519-34597cf092d0 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.970392] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Created directory with path [datastore2] vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2378.970580] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Fetch image to [datastore2] vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2378.970762] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2378.971502] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da12c0e-70a6-4b70-8268-d5267970fd59 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.977727] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc09124-325c-42f3-8042-0e37ee8754f4 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.987740] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e9708e-ce27-400a-897c-4b8ad18e2014 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.021790] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e68f89-38b7-4e06-9969-91be65d6144d {{(pid=61855) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.028310] env[61855]: DEBUG oslo_vmware.api [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Task: {'id': task-4303002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076476} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2379.029785] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2379.029980] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2379.030170] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2379.030345] env[61855]: INFO nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2379.032127] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5e141eb5-ebe7-43c2-a736-6de5b759f62a {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.033901] env[61855]: DEBUG nova.compute.claims [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2379.034090] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.034309] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.057128] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2379.111316] env[61855]: DEBUG oslo_vmware.rw_handles [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2379.171212] env[61855]: DEBUG oslo_vmware.rw_handles [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2379.171401] env[61855]: DEBUG oslo_vmware.rw_handles [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2379.230223] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977ea078-f033-4c30-9b72-9d021645cf50 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.238270] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c6c059-ce98-4b79-91fb-796682d28b3e {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.270640] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c449be21-89e9-44ce-b450-f064f1c7a0e7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.277629] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69dafda1-5665-452f-8c42-cf7eed9b6b1b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.290651] env[61855]: DEBUG nova.compute.provider_tree [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2379.300987] env[61855]: DEBUG nova.scheduler.client.report [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2379.314856] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.280s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.315393] env[61855]: ERROR nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.315393] env[61855]: Faults: ['InvalidArgument'] [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Traceback (most recent call last): [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2379.315393] 
env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self.driver.spawn(context, instance, image_meta, [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self._fetch_image_if_missing(context, vi) [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] image_cache(vi, tmp_image_ds_loc) [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] vm_util.copy_virtual_disk( [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] session._wait_for_task(vmdk_copy_task) [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] return self.wait_for_task(task_ref) [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] return evt.wait() [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] result = hub.switch() [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] return self.greenlet.switch() [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] self.f(*self.args, **self.kw) [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] raise exceptions.translate_fault(task_info.error) [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Faults: ['InvalidArgument'] [ 2379.315393] env[61855]: ERROR nova.compute.manager [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] [ 2379.316157] env[61855]: DEBUG nova.compute.utils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2379.317435] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Build of instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d was re-scheduled: A specified parameter was not correct: fileType [ 2379.317435] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2379.317868] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2379.318063] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2379.318246] env[61855]: DEBUG nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2379.318412] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2379.973521] env[61855]: DEBUG nova.network.neutron [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2379.993349] env[61855]: INFO nova.compute.manager [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Took 0.67 seconds to deallocate network for instance. [ 2380.095388] env[61855]: INFO nova.scheduler.client.report [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Deleted allocations for instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d [ 2380.116832] env[61855]: DEBUG oslo_concurrency.lockutils [None req-14f0ec58-8a0b-4f8d-8552-ade70e4b5d0c tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 514.818s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.117187] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 319.823s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.117434] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.117673] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.117879] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.120212] env[61855]: INFO nova.compute.manager [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Terminating instance [ 2380.122220] env[61855]: DEBUG nova.compute.manager [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2380.122428] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2380.122701] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94c48453-3069-4e85-997d-c9a967276d5b {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.131501] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1f48a3-c7f9-4d01-b17b-9460c4e52ca3 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.157886] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97cb77dc-8752-4d84-a2f3-9c22b241fa9d could not be found. [ 2380.158106] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2380.158293] env[61855]: INFO nova.compute.manager [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2380.158543] env[61855]: DEBUG oslo.service.loopingcall [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2380.159100] env[61855]: DEBUG nova.compute.manager [-] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2380.159209] env[61855]: DEBUG nova.network.neutron [-] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2380.185725] env[61855]: DEBUG nova.network.neutron [-] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.194167] env[61855]: INFO nova.compute.manager [-] [instance: 97cb77dc-8752-4d84-a2f3-9c22b241fa9d] Took 0.03 seconds to deallocate network for instance. [ 2380.279992] env[61855]: DEBUG oslo_concurrency.lockutils [None req-9c4f802f-2a77-44c4-8481-98a119d20bf1 tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Lock "97cb77dc-8752-4d84-a2f3-9c22b241fa9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.163s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2382.237741] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.254061] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Getting list of instances from cluster (obj){ [ 2382.254061] env[61855]: value = "domain-c8" [ 2382.254061] env[61855]: _type = "ClusterComputeResource" [ 2382.254061] env[61855]: } {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2382.255359] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f8234c-93ad-46f5-a448-65091be5de27 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.269633] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Got total of 6 instances {{(pid=61855) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2382.269875] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 455d56f3-71f3-4024-a52b-8fd59ca923d7 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2382.270041] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid aa42a06f-c679-4530-8762-15ea8dc35d9b {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2382.270214] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] 
Triggering sync for uuid bb506aaf-a599-4619-8035-a60952973f0c {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2382.270376] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 127cd38a-4d53-4ca0-a806-e178937d7fe2 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2382.270535] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid e8d09476-022c-4c0b-907d-bcc90d2d3963 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2382.270688] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Triggering sync for uuid 84197864-dc04-4eec-80d4-7800b2892507 {{(pid=61855) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2382.271062] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.271289] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "aa42a06f-c679-4530-8762-15ea8dc35d9b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.271540] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "bb506aaf-a599-4619-8035-a60952973f0c" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.271715] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "127cd38a-4d53-4ca0-a806-e178937d7fe2" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.271907] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "e8d09476-022c-4c0b-907d-bcc90d2d3963" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.272127] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "84197864-dc04-4eec-80d4-7800b2892507" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2401.954621] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2401.954995] env[61855]: DEBUG oslo_service.periodic_task [None 
req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2402.924630] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2402.924891] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2404.926615] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.923730] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.923974] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61855) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2407.926014] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.926014] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Starting heal instance info cache {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2407.926014] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Rebuilding the list of instances to heal {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2407.945564] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2407.945802] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2407.945958] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: bb506aaf-a599-4619-8035-a60952973f0c] Skipping network cache update for instance because it is Building. 
{{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2407.946110] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 127cd38a-4d53-4ca0-a806-e178937d7fe2] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2407.946254] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: e8d09476-022c-4c0b-907d-bcc90d2d3963] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2407.946380] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 84197864-dc04-4eec-80d4-7800b2892507] Skipping network cache update for instance because it is Building. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2407.946504] env[61855]: DEBUG nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Didn't find any instances for network info cache update. {{(pid=61855) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2409.925052] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2409.936110] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2409.936341] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2409.936511] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2409.936666] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61855) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2409.937786] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7db8b3-9911-4391-abff-69dab0a76000 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.946508] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880b7af4-5d69-4fef-a108-71f64bce2ade {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.960227] env[61855]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4619a8-0729-4c60-97b5-019dc5a00873 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.966404] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbb2fba-7b9c-4c30-ab8e-b24d1791a550 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.995831] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180570MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61855) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2409.995982] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2409.996709] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2410.054550] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2410.054725] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance aa42a06f-c679-4530-8762-15ea8dc35d9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2410.054856] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance bb506aaf-a599-4619-8035-a60952973f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2410.054981] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 127cd38a-4d53-4ca0-a806-e178937d7fe2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2410.055118] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance e8d09476-022c-4c0b-907d-bcc90d2d3963 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2410.055243] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Instance 84197864-dc04-4eec-80d4-7800b2892507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61855) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2410.055432] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2410.055574] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=110GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61855) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2410.070929] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing inventories for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2410.084413] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating ProviderTree inventory for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2410.084638] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Updating inventory in ProviderTree for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2410.095112] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing aggregate associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, aggregates: None {{(pid=61855) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2410.113484] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Refreshing trait associations for resource provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61855) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2410.195060] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c40b81-55b6-43d9-9e07-4be65c917e18 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.202719] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a0d684-9530-4bff-801a-0d49aacc0e84 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.231799] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211bb733-ec3e-4c87-8a58-bec25fe2ca48 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.238572] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf147d3-dd43-4a92-96b3-2359f32f1dac {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.251227] env[61855]: DEBUG nova.compute.provider_tree [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2410.259366] env[61855]: DEBUG nova.scheduler.client.report [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2410.273884] env[61855]: DEBUG nova.compute.resource_tracker [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61855) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2410.274080] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.277s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.274582] env[61855]: DEBUG oslo_service.periodic_task [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61855) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2425.227469] env[61855]: WARNING oslo_vmware.rw_handles [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles response.begin() [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2425.227469] env[61855]: ERROR oslo_vmware.rw_handles [ 2425.228545] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Downloaded image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2425.229695] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Caching image {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2425.229939] env[61855]: DEBUG nova.virt.vmwareapi.vm_util [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Copying Virtual Disk [datastore2] vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk to [datastore2] vmware_temp/5c4a022d-888c-4bb3-9d7a-7d3264aa385d/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk {{(pid=61855) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2425.230308] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9221274b-c8ec-4135-9aeb-7678145c56c9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.238283] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){ [ 2425.238283] env[61855]: value = "task-4303003" [ 2425.238283] env[61855]: _type = "Task" [ 2425.238283] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2425.246236] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4303003, 'name': CopyVirtualDisk_Task} progress is 0%. 
[ 2425.749235] env[61855]: DEBUG oslo_vmware.exceptions [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Fault InvalidArgument not matched. {{(pid=61855) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2425.749527] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2425.750090] env[61855]: ERROR nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2425.750090] env[61855]: Faults: ['InvalidArgument']
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Traceback (most recent call last):
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     yield resources
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self.driver.spawn(context, instance, image_meta,
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self._fetch_image_if_missing(context, vi)
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     image_cache(vi, tmp_image_ds_loc)
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     vm_util.copy_virtual_disk(
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     session._wait_for_task(vmdk_copy_task)
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     return self.wait_for_task(task_ref)
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     return evt.wait()
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     result = hub.switch()
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     return self.greenlet.switch()
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self.f(*self.args, **self.kw)
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     raise exceptions.translate_fault(task_info.error)
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Faults: ['InvalidArgument']
[ 2425.750090] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]
[ 2425.751522] env[61855]: INFO nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Terminating instance
[ 2425.752112] env[61855]: DEBUG oslo_concurrency.lockutils [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/c2cb7e7f-87ef-46fd-89b3-fd48992318fc.vmdk" {{(pid=61855) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2425.752361] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2425.752610] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6ffe5bb-89ce-4a8a-90ef-2d05b3235537 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2425.754713] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 2425.754906] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2425.755672] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc908d0b-f1ca-42c3-84f8-e8d632b73d89 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2425.762296] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Unregistering the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2425.762503] env[61855]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cfd903a-c5d1-41d0-82c7-bbb13e9b42d6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2425.764566] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2425.764744] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61855) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2425.765760] env[61855]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5d9b03d-a760-4adb-a173-6bab9b9d73b7 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2425.771413] env[61855]: DEBUG oslo_vmware.api [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Waiting for the task: (returnval){
[ 2425.771413] env[61855]: value = "session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52093ffa-7407-44a9-e093-0d9db8cadea1"
[ 2425.771413] env[61855]: _type = "Task"
[ 2425.771413] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2425.778616] env[61855]: DEBUG oslo_vmware.api [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Task: {'id': session[527656d5-3eba-7a2b-0537-e6d19400a2f1]52093ffa-7407-44a9-e093-0d9db8cadea1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2425.824095] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Unregistered the VM {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2425.824329] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Deleting contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2425.824495] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleting the datastore file [datastore2] 455d56f3-71f3-4024-a52b-8fd59ca923d7 {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2425.824756] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ef60842-5472-40bb-b53d-fd7f45cc40df {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2425.830370] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for the task: (returnval){
[ 2425.830370] env[61855]: value = "task-4303005"
[ 2425.830370] env[61855]: _type = "Task"
[ 2425.830370] env[61855]: } to complete. {{(pid=61855) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2425.837536] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4303005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2426.282186] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Preparing fetch location {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2426.282570] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating directory with path [datastore2] vmware_temp/0f1f0684-d958-4907-ab72-58355bc6632f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2426.282690] env[61855]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a60ae81f-c982-4bf9-9317-9ff3ca1f2588 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.294376] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Created directory with path [datastore2] vmware_temp/0f1f0684-d958-4907-ab72-58355bc6632f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc {{(pid=61855) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2426.294576] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Fetch image to [datastore2] vmware_temp/0f1f0684-d958-4907-ab72-58355bc6632f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk {{(pid=61855) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2426.294755] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to [datastore2] vmware_temp/0f1f0684-d958-4907-ab72-58355bc6632f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk on the data store datastore2 {{(pid=61855) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2426.295575] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcce016-1ddf-461a-a62c-41fcd2f1ecb6 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.301939] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442c6329-b8e6-4543-9321-62ee983ec341 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.310783] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d99223-ba6b-4560-8caf-d75751a50ee9 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.344504] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b1b9e4-3334-4900-a4e8-3cfcaa65b261 {{(pid=61855)
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.351091] env[61855]: DEBUG oslo_vmware.api [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Task: {'id': task-4303005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072794} completed successfully. {{(pid=61855) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2426.352484] env[61855]: DEBUG nova.virt.vmwareapi.ds_util [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted the datastore file {{(pid=61855) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2426.352682] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Deleted contents of the VM from datastore datastore2 {{(pid=61855) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2426.352859] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2426.353055] env[61855]: INFO nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Took 0.60 seconds to destroy the instance on the hypervisor. 
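
Annotation: both tracebacks in this section end the same way: oslo.vmware's task-polling loop (_poll_task, api.py:448) raises the fault translated from the vCenter task error, and Nova's spawn path lets it propagate. A minimal Python sketch of how such a failure surfaces to a caller, assuming session is the oslo_vmware.api.VMwareAPISession created at the start of this log; the helper name run_vim_task and the logging are illustrative, not Nova's actual code:

    import logging

    from oslo_vmware import exceptions as vexc

    LOG = logging.getLogger(__name__)

    def run_vim_task(session, task_ref):
        """Wait on a VIM task and surface its fault list on failure."""
        try:
            # wait_for_task polls the task server-side; each poll produces a
            # "Task: {...} progress is N%" record like the ones above.
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            # exc.fault_list names the VIM faults, e.g. ['InvalidArgument']
            # for the "A specified parameter was not correct: fileType"
            # failure above. Such faults are not transient, so the compute
            # manager aborts the resource claim and re-schedules the build.
            LOG.error("VIM task failed with faults %s: %s",
                      exc.fault_list, exc)
            raise
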
[ 2426.354809] env[61855]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2bdafd60-2740-47d4-ae0d-888e3cc2d531 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.356690] env[61855]: DEBUG nova.compute.claims [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Aborting claim: {{(pid=61855) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2426.356864] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2426.357095] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2426.380025] env[61855]: DEBUG nova.virt.vmwareapi.images [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] [instance: aa42a06f-c679-4530-8762-15ea8dc35d9b] Downloading image file data c2cb7e7f-87ef-46fd-89b3-fd48992318fc to the data store datastore2 {{(pid=61855) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2426.434459] env[61855]: DEBUG oslo_vmware.rw_handles [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f1f0684-d958-4907-ab72-58355bc6632f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61855) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2426.498824] env[61855]: DEBUG oslo_vmware.rw_handles [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Completed reading data from the image iterator. {{(pid=61855) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2426.499045] env[61855]: DEBUG oslo_vmware.rw_handles [None req-ceb28fa1-9537-428b-85ad-99985eb2ff4b tempest-DeleteServersTestJSON-234240672 tempest-DeleteServersTestJSON-234240672-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f1f0684-d958-4907-ab72-58355bc6632f/c2cb7e7f-87ef-46fd-89b3-fd48992318fc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61855) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2426.558355] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ca61ea-4d01-43b8-96f7-7fc0fd2bbb40 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.566112] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9aff3f5-8b8d-4575-892e-24ac20fe31ed {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.595273] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce6ccd4-9890-4d79-9cd8-572d52debe80 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.602178] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78051a61-bead-4fef-b6c7-2f9e55a8b895 {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2426.614923] env[61855]: DEBUG nova.compute.provider_tree [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed in ProviderTree for provider: a9796bef-9c86-40e9-9cad-f1ac6217d1ad {{(pid=61855) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2426.623786] env[61855]: DEBUG nova.scheduler.client.report [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Inventory has not changed for provider a9796bef-9c86-40e9-9cad-f1ac6217d1ad based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61855) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 2426.638381] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.281s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2426.638939] env[61855]: ERROR nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2426.638939] env[61855]: Faults: ['InvalidArgument']
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Traceback (most recent call last):
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self.driver.spawn(context, instance, image_meta,
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self._fetch_image_if_missing(context, vi)
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     image_cache(vi, tmp_image_ds_loc)
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     vm_util.copy_virtual_disk(
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     session._wait_for_task(vmdk_copy_task)
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     return self.wait_for_task(task_ref)
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     return evt.wait()
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     result = hub.switch()
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     return self.greenlet.switch()
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     self.f(*self.args, **self.kw)
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]     raise exceptions.translate_fault(task_info.error)
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Faults: ['InvalidArgument']
[ 2426.638939] env[61855]: ERROR nova.compute.manager [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7]
[ 2426.640383] env[61855]: DEBUG nova.compute.utils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] VimFaultException {{(pid=61855) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2426.641213] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Build of instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 was re-scheduled: A specified parameter was not correct: fileType
[ 2426.641213] env[61855]: Faults: ['InvalidArgument'] {{(pid=61855) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 2426.641610] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Unplugging VIFs for instance {{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 2426.641787] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61855) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2426.641964] env[61855]: DEBUG nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2426.642147] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2427.034557] env[61855]: DEBUG nova.network.neutron [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2427.049643] env[61855]: INFO nova.compute.manager [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Took 0.41 seconds to deallocate network for instance. [ 2427.149198] env[61855]: INFO nova.scheduler.client.report [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Deleted allocations for instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 [ 2427.172178] env[61855]: DEBUG oslo_concurrency.lockutils [None req-07eddb05-98de-430d-ac63-6db5634d985a tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 449.617s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2427.172555] env[61855]: DEBUG oslo_concurrency.lockutils [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 252.993s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2427.172780] env[61855]: DEBUG oslo_concurrency.lockutils [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Acquiring lock "455d56f3-71f3-4024-a52b-8fd59ca923d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2427.172990] env[61855]: DEBUG oslo_concurrency.lockutils [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2427.173183] env[61855]: DEBUG oslo_concurrency.lockutils [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2427.175569] env[61855]: INFO nova.compute.manager [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Terminating instance [ 2427.176749] env[61855]: DEBUG nova.compute.manager [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Start destroying the instance on the hypervisor. {{(pid=61855) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2427.176942] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Destroying instance {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2427.177448] env[61855]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29816d3b-2285-4650-85e8-c34fe70e25fe {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.186986] env[61855]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7db48da-7a5a-439b-ae65-cdb9fce848eb {{(pid=61855) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.214011] env[61855]: WARNING nova.virt.vmwareapi.vmops [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 455d56f3-71f3-4024-a52b-8fd59ca923d7 could not be found. [ 2427.214243] env[61855]: DEBUG nova.virt.vmwareapi.vmops [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Instance destroyed {{(pid=61855) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2427.214430] env[61855]: INFO nova.compute.manager [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2427.214675] env[61855]: DEBUG oslo.service.loopingcall [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61855) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2427.215184] env[61855]: DEBUG nova.compute.manager [-] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Deallocating network for instance {{(pid=61855) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2427.215288] env[61855]: DEBUG nova.network.neutron [-] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] deallocate_for_instance() {{(pid=61855) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2427.241029] env[61855]: DEBUG nova.network.neutron [-] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Updating instance_info_cache with network_info: [] {{(pid=61855) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2427.250801] env[61855]: INFO nova.compute.manager [-] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] Took 0.04 seconds to deallocate network for instance. [ 2427.344819] env[61855]: DEBUG oslo_concurrency.lockutils [None req-26d49dd1-b3bf-4fd2-b525-acb67ee78fa0 tempest-ServersTestJSON-1669392497 tempest-ServersTestJSON-1669392497-project-member] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2427.345623] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 45.075s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2427.345812] env[61855]: INFO nova.compute.manager [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] [instance: 455d56f3-71f3-4024-a52b-8fd59ca923d7] During sync_power_state the instance has a pending task (deleting). Skip. [ 2427.345988] env[61855]: DEBUG oslo_concurrency.lockutils [None req-d50e0bcc-122c-4eec-9dec-90ad321503e0 None None] Lock "455d56f3-71f3-4024-a52b-8fd59ca923d7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2454.590205] env[61855]: DEBUG oslo_concurrency.lockutils [None req-b6552795-e02e-406f-99d4-db99c4b7525a tempest-AttachVolumeNegativeTest-1164086370 tempest-AttachVolumeNegativeTest-1164086370-project-member] Acquiring lock "127cd38a-4d53-4ca0-a806-e178937d7fe2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61855) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
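
Annotation: the recurring lockutils records in this section ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited N.NNNs", "Lock ... 'released' ... :: held N.NNNs") are emitted by oslo.concurrency's named-lock wrapper (lockutils.py:402/407/421, as the records themselves show). A minimal sketch of the pattern, assuming the nova- lock prefix; the decorated function body is illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Nova-style synchronized decorator; the inner wrapper logs the
    # acquire/release DEBUG records seen above, including wait/hold times.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_available_resource():
        # Runs with the named lock held; concurrent callers block here, and
        # their blocking time appears as "waited N.NNNs" in the log.
        pass
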